diff --git a/cypress/e2e/surveys.cy.ts b/cypress/e2e/surveys.cy.ts
index 1cccfb545fc57..604c182a51a18 100644
--- a/cypress/e2e/surveys.cy.ts
+++ b/cypress/e2e/surveys.cy.ts
@@ -100,7 +100,7 @@ describe('Surveys', () => {
         cy.get('[data-attr=prop-val]').click({ force: true })
         cy.get('[data-attr=prop-val-0]').click({ force: true })
-        cy.get('[data-attr="rollout-percentage"]').type('100')
+        cy.get('[data-attr="rollout-percentage"]').click().type('100')
 
         // save
         cy.get('[data-attr="save-survey"]').eq(0).click()
 
@@ -202,7 +202,7 @@ describe('Surveys', () => {
         cy.get('[data-attr="prop-filter-person_properties-0"]').click()
         cy.get('[data-attr=prop-val]').click({ force: true })
         cy.get('[data-attr=prop-val-0]').click({ force: true })
-        cy.get('[data-attr="rollout-percentage"]').type('100')
+        cy.get('[data-attr="rollout-percentage"]').click().type('100')
 
         cy.get('[data-attr=save-survey]').first().click()
 
diff --git a/cypress/wait.py b/cypress/wait.py
index 275800beb6bfb..ceb58b45bc539 100644
--- a/cypress/wait.py
+++ b/cypress/wait.py
@@ -17,8 +17,8 @@ def main():
                 print("PostHog is alive! Proceeding")
                 continue
             else:
-                # recieved not 200 from PostHog, but service is up
-                print("Found status %d" % (r.status,))
+                # received not 200 from PostHog, but service is up
+                print(f"Found status {r.status:d}")
                 with open("cypress/screenshots/curl.html", "wb") as f:
                     f.write(r.read)  # type: ignore
                 print("PostHog is still booting. Sleeping for 1 second")
diff --git a/ee/clickhouse/materialized_columns/columns.py b/ee/clickhouse/materialized_columns/columns.py
index 308148597124c..c9624bf96bacd 100644
--- a/ee/clickhouse/materialized_columns/columns.py
+++ b/ee/clickhouse/materialized_columns/columns.py
@@ -1,21 +1,28 @@
 from __future__ import annotations
 
 import re
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
+from copy import copy
 from dataclasses import dataclass, replace
 from datetime import timedelta
-from typing import Literal, NamedTuple, cast
+from typing import Any, Literal, NamedTuple, TypeVar, cast
 
-from clickhouse_driver.errors import ServerException
+from clickhouse_driver import Client
 from django.utils.timezone import now
 
+from posthog.clickhouse.client.connection import default_client
+from posthog.clickhouse.cluster import ClickhouseCluster, ConnectionInfo, FuturesMap, HostInfo
 from posthog.clickhouse.kafka_engine import trim_quotes_expr
 from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns
 from posthog.client import sync_execute
+from posthog.models.event.sql import EVENTS_DATA_TABLE
 from posthog.models.instance_setting import get_instance_setting
+from posthog.models.person.sql import PERSONS_TABLE
 from posthog.models.property import PropertyName, TableColumn, TableWithProperties
 from posthog.models.utils import generate_random_short_suffix
-from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE, TEST
+from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_PER_TEAM_SETTINGS, TEST
+
+T = TypeVar("T")
 
 DEFAULT_TABLE_COLUMN: Literal["properties"] = "properties"
 
@@ -116,135 +123,253 @@ def get_materialized_columns(
     }
 
 
-def get_on_cluster_clause_for_table(table: TableWithProperties) -> str:
-    return f"ON CLUSTER '{CLICKHOUSE_CLUSTER}'" if table == "events" else ""
+def get_cluster() -> ClickhouseCluster:
+    extra_hosts = []
+    for host_config in map(copy, CLICKHOUSE_PER_TEAM_SETTINGS.values()):
+        extra_hosts.append(ConnectionInfo(host_config.pop("host"), host_config.pop("port", None)))
+        assert len(host_config)
== 0, f"unexpected values: {host_config!r}" + return ClickhouseCluster(default_client(), extra_hosts=extra_hosts) -def materialize( - table: TableWithProperties, - property: PropertyName, - column_name: ColumnName | None = None, - table_column: TableColumn = DEFAULT_TABLE_COLUMN, - create_minmax_index=not TEST, -) -> ColumnName | None: - if (property, table_column) in get_materialized_columns(table): - if TEST: - return None +@dataclass +class TableInfo: + data_table: str - raise ValueError(f"Property already materialized. table={table}, property={property}, column={table_column}") + @property + def read_table(self) -> str: + return self.data_table - if table_column not in SHORT_TABLE_COLUMN_NAME: - raise ValueError(f"Invalid table_column={table_column} for materialisation") + def map_data_nodes(self, cluster: ClickhouseCluster, fn: Callable[[Client], T]) -> FuturesMap[HostInfo, T]: + return cluster.map_all_hosts(fn) - column_name = column_name or _materialized_column_name(table, property, table_column) - on_cluster = get_on_cluster_clause_for_table(table) - if table == "events": - sync_execute( - f""" - ALTER TABLE sharded_{table} {on_cluster} - ADD COLUMN IF NOT EXISTS - {column_name} VARCHAR MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=table_column)} - """, - {"property": property}, - settings={"alter_sync": 2 if TEST else 1}, - ) - sync_execute( - f""" - ALTER TABLE {table} {on_cluster} - ADD COLUMN IF NOT EXISTS - {column_name} VARCHAR - """, - settings={"alter_sync": 2 if TEST else 1}, - ) - else: - sync_execute( - f""" - ALTER TABLE {table} {on_cluster} - ADD COLUMN IF NOT EXISTS - {column_name} VARCHAR MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=table_column)} - """, - {"property": property}, - settings={"alter_sync": 2 if TEST else 1}, - ) +@dataclass +class ShardedTableInfo(TableInfo): + dist_table: str - sync_execute( - f"ALTER TABLE {table} {on_cluster} COMMENT COLUMN {column_name} %(comment)s", - {"comment": MaterializedColumnDetails(table_column, property, is_disabled=False).as_column_comment()}, - settings={"alter_sync": 2 if TEST else 1}, - ) + @property + def read_table(self) -> str: + return self.dist_table - if create_minmax_index: - add_minmax_index(table, column_name) + def map_data_nodes(self, cluster: ClickhouseCluster, fn: Callable[[Client], T]) -> FuturesMap[HostInfo, T]: + return cluster.map_one_host_per_shard(fn) - return column_name +tables: dict[str, TableInfo | ShardedTableInfo] = { + PERSONS_TABLE: TableInfo(PERSONS_TABLE), + "events": ShardedTableInfo(EVENTS_DATA_TABLE(), "events"), +} -def update_column_is_disabled(table: TablesWithMaterializedColumns, column_name: str, is_disabled: bool) -> None: - details = replace( - MaterializedColumn.get(table, column_name).details, - is_disabled=is_disabled, - ) - on_cluster = get_on_cluster_clause_for_table(table) - sync_execute( - f"ALTER TABLE {table} {on_cluster} COMMENT COLUMN {column_name} %(comment)s", - {"comment": details.as_column_comment()}, - settings={"alter_sync": 2 if TEST else 1}, - ) +@dataclass +class CreateColumnOnDataNodesTask: + table: str + column: MaterializedColumn + create_minmax_index: bool + add_column_comment: bool + def execute(self, client: Client) -> None: + actions = [ + f""" + ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR + MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=self.column.details.table_column)} + """, + ] + parameters = {"property": self.column.details.property_name} -def drop_column(table: TablesWithMaterializedColumns, 
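Note on the `TableInfo` / `ShardedTableInfo` split introduced above: a plain table maps its DDL onto every host, while a sharded table maps data-table DDL onto one replica per shard and keeps the Distributed read table for statements that must run everywhere. A minimal sketch of that dispatch pattern, with a made-up `FakeCluster` standing in for the real `posthog.clickhouse.cluster.ClickhouseCluster` (host names and return types here are illustrative only):

```python
from dataclasses import dataclass
from typing import Callable


@dataclass
class FakeCluster:
    hosts: list[str]              # every ClickHouse node (data + query nodes)
    shards: dict[int, list[str]]  # shard number -> replica hosts

    def map_all_hosts(self, fn: Callable[[str], str]) -> dict[str, str]:
        # e.g. altering the Distributed "read" table on every host
        return {host: fn(host) for host in self.hosts}

    def map_one_host_per_shard(self, fn: Callable[[str], str]) -> dict[str, str]:
        # e.g. altering the sharded "data" table once per shard; replication propagates it
        return {replicas[0]: fn(replicas[0]) for replicas in self.shards.values()}


cluster = FakeCluster(hosts=["ch1", "ch2", "ch3", "ch4"], shards={1: ["ch1", "ch2"], 2: ["ch3", "ch4"]})

# sharded table ("events"): data-table DDL once per shard, read-table DDL on all hosts
print(cluster.map_one_host_per_shard(lambda host: f"ALTER TABLE sharded_events ... ON {host}"))
print(cluster.map_all_hosts(lambda host: f"ALTER TABLE events ... ON {host}"))

# non-sharded table ("person"): everything goes through map_all_hosts
print(cluster.map_all_hosts(lambda host: f"ALTER TABLE person ... ON {host}"))
```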
column_name: str) -> None: - drop_minmax_index(table, column_name) + if self.add_column_comment: + actions.append(f"COMMENT COLUMN {self.column.name} %(comment)s") + parameters["comment"] = self.column.details.as_column_comment() - on_cluster = get_on_cluster_clause_for_table(table) - sync_execute( - f"ALTER TABLE {table} {on_cluster} DROP COLUMN IF EXISTS {column_name}", - settings={"alter_sync": 2 if TEST else 1}, - ) + if self.create_minmax_index: + index_name = f"minmax_{self.column.name}" + actions.append(f"ADD INDEX IF NOT EXISTS {index_name} {self.column.name} TYPE minmax GRANULARITY 1") - if table == "events": - sync_execute( - f"ALTER TABLE sharded_{table} {on_cluster} DROP COLUMN IF EXISTS {column_name}", - {"property": property}, + client.execute( + f"ALTER TABLE {self.table} " + ", ".join(actions), + parameters, settings={"alter_sync": 2 if TEST else 1}, ) -def add_minmax_index(table: TablesWithMaterializedColumns, column_name: ColumnName): - # Note: This will be populated on backfill - on_cluster = get_on_cluster_clause_for_table(table) - updated_table = "sharded_events" if table == "events" else table - index_name = f"minmax_{column_name}" +@dataclass +class CreateColumnOnQueryNodesTask: + table: str + column: MaterializedColumn - try: - sync_execute( + def execute(self, client: Client) -> None: + client.execute( f""" - ALTER TABLE {updated_table} {on_cluster} - ADD INDEX {index_name} {column_name} - TYPE minmax GRANULARITY 1 + ALTER TABLE {self.table} + ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR, + COMMENT COLUMN {self.column.name} %(comment)s """, + {"comment": self.column.details.as_column_comment()}, settings={"alter_sync": 2 if TEST else 1}, ) - except ServerException as err: - if "index with this name already exists" not in str(err): - raise - return index_name +def materialize( + table: TableWithProperties, + property: PropertyName, + column_name: ColumnName | None = None, + table_column: TableColumn = DEFAULT_TABLE_COLUMN, + create_minmax_index=not TEST, +) -> ColumnName | None: + if (property, table_column) in get_materialized_columns(table): + if TEST: + return None + + raise ValueError(f"Property already materialized. 
table={table}, property={property}, column={table_column}") -def drop_minmax_index(table: TablesWithMaterializedColumns, column_name: ColumnName) -> None: - on_cluster = get_on_cluster_clause_for_table(table) + if table_column not in SHORT_TABLE_COLUMN_NAME: + raise ValueError(f"Invalid table_column={table_column} for materialisation") - # XXX: copy/pasted from `add_minmax_index` - updated_table = "sharded_events" if table == "events" else table - index_name = f"minmax_{column_name}" + cluster = get_cluster() + table_info = tables[table] - sync_execute( - f"ALTER TABLE {updated_table} {on_cluster} DROP INDEX IF EXISTS {index_name}", - settings={"alter_sync": 2 if TEST else 1}, + column = MaterializedColumn( + name=column_name or _materialized_column_name(table, property, table_column), + details=MaterializedColumnDetails( + table_column=table_column, + property_name=property, + is_disabled=False, + ), ) + table_info.map_data_nodes( + cluster, + CreateColumnOnDataNodesTask( + table_info.data_table, + column, + create_minmax_index, + add_column_comment=table_info.read_table == table_info.data_table, + ).execute, + ).result() + + if isinstance(table_info, ShardedTableInfo): + cluster.map_all_hosts( + CreateColumnOnQueryNodesTask( + table_info.dist_table, + column, + ).execute + ).result() + + return column.name + + +@dataclass +class UpdateColumnCommentTask: + table: str + column: MaterializedColumn + + def execute(self, client: Client) -> None: + client.execute( + f"ALTER TABLE {self.table} COMMENT COLUMN {self.column.name} %(comment)s", + {"comment": self.column.details.as_column_comment()}, + settings={"alter_sync": 2 if TEST else 1}, + ) + + +def update_column_is_disabled(table: TablesWithMaterializedColumns, column_name: str, is_disabled: bool) -> None: + cluster = get_cluster() + table_info = tables[table] + + cluster.map_all_hosts( + UpdateColumnCommentTask( + table_info.read_table, + MaterializedColumn( + name=column_name, + details=replace( + MaterializedColumn.get(table, column_name).details, + is_disabled=is_disabled, + ), + ), + ).execute + ).result() + + +@dataclass +class DropColumnTask: + table: str + column_name: str + try_drop_index: bool + + def execute(self, client: Client) -> None: + # XXX: copy/pasted from create task + if self.try_drop_index: + index_name = f"minmax_{self.column_name}" + client.execute( + f"ALTER TABLE {self.table} DROP INDEX IF EXISTS {index_name}", + settings={"alter_sync": 2 if TEST else 1}, + ) + + client.execute( + f"ALTER TABLE {self.table} DROP COLUMN IF EXISTS {self.column_name}", + settings={"alter_sync": 2 if TEST else 1}, + ) + + +def drop_column(table: TablesWithMaterializedColumns, column_name: str) -> None: + cluster = get_cluster() + table_info = tables[table] + + if isinstance(table_info, ShardedTableInfo): + cluster.map_all_hosts( + DropColumnTask( + table_info.dist_table, + column_name, + try_drop_index=False, # no indexes on distributed tables + ).execute + ).result() + + table_info.map_data_nodes( + cluster, + DropColumnTask( + table_info.data_table, + column_name, + try_drop_index=True, + ).execute, + ).result() + + +@dataclass +class BackfillColumnTask: + table: str + columns: list[MaterializedColumn] + backfill_period: timedelta | None + test_settings: dict[str, Any] | None + + def execute(self, client: Client) -> None: + # Hack from https://github.com/ClickHouse/ClickHouse/issues/19785 + # Note that for this to work all inserts should list columns explicitly + # Improve this if https://github.com/ClickHouse/ClickHouse/issues/27730 
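For context on the workaround referenced in the comment here and implemented by `BackfillColumnTask` just below: the materialized column is temporarily redeclared as `DEFAULT`, so its extraction expression also applies to rows written before the column existed, and a no-op `UPDATE col = col` mutation then rewrites existing parts in the background. A hedged sketch of the two statements, with placeholder table and column names:

```python
def backfill_statements(table: str, column: str, extract_expr: str, cutoff: str | None) -> list[str]:
    return [
        # 1. Redeclare the column as DEFAULT so the extraction expression can be
        #    applied to rows that predate the column (ClickHouse #19785 workaround).
        f"ALTER TABLE {table} MODIFY COLUMN {column} VARCHAR DEFAULT {extract_expr}",
        # 2. A no-op assignment kicks off a mutation that rewrites existing parts
        #    in the background, materializing the default for historical rows.
        f"ALTER TABLE {table} UPDATE {column} = {column} "
        + (f"WHERE timestamp > '{cutoff}'" if cutoff else "WHERE 1 = 1"),
    ]


# illustrative names only; not taken from this PR
for statement in backfill_statements(
    "sharded_events",
    "mat_browser",
    "JSONExtractRaw(properties, 'browser')",
    cutoff="2024-01-01",
):
    print(statement)
```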
ever gets resolved + for column in self.columns: + client.execute( + f""" + ALTER TABLE {self.table} + MODIFY COLUMN {column.name} VARCHAR DEFAULT {TRIM_AND_EXTRACT_PROPERTY.format(table_column=column.details.table_column)} + """, + {"property": column.details.property_name}, + settings=self.test_settings, + ) + + # Kick off mutations which will update clickhouse partitions in the background. This will return immediately + assignments = ", ".join(f"{column.name} = {column.name}" for column in self.columns) + + if self.backfill_period is not None: + where_clause = "timestamp > %(cutoff)s" + parameters = {"cutoff": (now() - self.backfill_period).strftime("%Y-%m-%d")} + else: + where_clause = "1 = 1" + parameters = {} + + client.execute( + f"ALTER TABLE {self.table} UPDATE {assignments} WHERE {where_clause}", + parameters, + settings=self.test_settings, + ) + def backfill_materialized_columns( table: TableWithProperties, @@ -261,40 +386,25 @@ def backfill_materialized_columns( if len(properties) == 0: return - updated_table = "sharded_events" if table == "events" else table - on_cluster = get_on_cluster_clause_for_table(table) - - materialized_columns = get_materialized_columns(table) - - # Hack from https://github.com/ClickHouse/ClickHouse/issues/19785 - # Note that for this to work all inserts should list columns explicitly - # Improve this if https://github.com/ClickHouse/ClickHouse/issues/27730 ever gets resolved - for property, table_column in properties: - sync_execute( - f""" - ALTER TABLE {updated_table} {on_cluster} - MODIFY COLUMN - {materialized_columns[(property, table_column)]} VARCHAR DEFAULT {TRIM_AND_EXTRACT_PROPERTY.format(table_column=table_column)} - """, - {"property": property}, - settings=test_settings, - ) - - # Kick off mutations which will update clickhouse partitions in the background. This will return immediately - assignments = ", ".join( - f"{materialized_columns[property_and_column]} = {materialized_columns[property_and_column]}" - for property_and_column in properties - ) + cluster = get_cluster() + table_info = tables[table] - sync_execute( - f""" - ALTER TABLE {updated_table} {on_cluster} - UPDATE {assignments} - WHERE {"timestamp > %(cutoff)s" if table == "events" else "1 = 1"} - """, - {"cutoff": (now() - backfill_period).strftime("%Y-%m-%d")}, - settings=test_settings, - ) + # TODO: this will eventually need to handle duplicates + materialized_columns = { + (column.details.property_name, column.details.table_column): column + for column in MaterializedColumn.get_all(table) + } + columns = [materialized_columns[property] for property in properties] + + table_info.map_data_nodes( + cluster, + BackfillColumnTask( + table_info.data_table, + columns, + backfill_period if table == "events" else None, # XXX + test_settings, + ).execute, + ).result() def _materialized_column_name( diff --git a/ee/hogai/taxonomy.py b/ee/hogai/taxonomy.py index a213dbfb75c0d..5c7feccbd1af5 100644 --- a/ee/hogai/taxonomy.py +++ b/ee/hogai/taxonomy.py @@ -761,6 +761,11 @@ class CoreFilterDefinition(TypedDict): "description": "What the call to feature flag responded with.", "examples": ["true", "false"], }, + "$feature_flag_payload": { + "label": "Feature Flag Response Payload", + "description": "The JSON payload that the call to feature flag responded with (if any)", + "examples": ['{"variant": "test"}'], + }, "$feature_flag": { "label": "Feature Flag", "description": 'The feature flag that was called.\n\nWarning! This only works in combination with the $feature_flag_called event. 
If you want to filter other events, try "Active Feature Flags".', diff --git a/ee/session_recordings/ai/__init__.py b/ee/session_recordings/ai/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/ee/session_recordings/ai/embeddings_queries.py b/ee/session_recordings/ai/embeddings_queries.py deleted file mode 100644 index 2034a9f190152..0000000000000 --- a/ee/session_recordings/ai/embeddings_queries.py +++ /dev/null @@ -1,109 +0,0 @@ -from django.conf import settings - - -from posthog.models import Team -from posthog.clickhouse.client import sync_execute - -BATCH_FLUSH_SIZE = settings.REPLAY_EMBEDDINGS_BATCH_SIZE -MIN_DURATION_INCLUDE_SECONDS = settings.REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS - - -def fetch_errors_by_session_without_embeddings(team_id: int, offset=0) -> list[str]: - query = """ - WITH embedded_sessions AS ( - SELECT - session_id - FROM - session_replay_embeddings - WHERE - team_id = %(team_id)s - -- don't load all data for all time - AND generation_timestamp > now() - INTERVAL 7 DAY - AND source_type = 'error' - ) - SELECT log_source_id, message - FROM log_entries - PREWHERE - team_id = %(team_id)s - AND level = 'error' - AND log_source = 'session_replay' - AND timestamp <= now() - AND timestamp >= now() - INTERVAL 7 DAY - AND log_source_id NOT IN embedded_sessions - LIMIT %(batch_flush_size)s - -- when running locally the offset is used for paging - -- when running in celery the offset is not used - OFFSET %(offset)s - """ - - return sync_execute( - query, - { - "team_id": team_id, - "batch_flush_size": BATCH_FLUSH_SIZE, - "offset": offset, - }, - ) - - -def fetch_recordings_without_embeddings(team_id: int, offset=0) -> list[str]: - team = Team.objects.get(id=team_id) - - query = """ - WITH embedding_ids AS - ( - SELECT - session_id - FROM - session_replay_embeddings - WHERE - team_id = %(team_id)s - -- don't load all data for all time - AND generation_timestamp > now() - INTERVAL 7 DAY - ), - replay_with_events AS - ( - SELECT - distinct $session_id - FROM - events - WHERE - team_id = %(team_id)s - -- don't load all data for all time - AND timestamp > now() - INTERVAL 7 DAY - AND timestamp < now() - AND $session_id IS NOT NULL AND $session_id != '' - ) - SELECT session_id - FROM - session_replay_events - WHERE - session_id NOT IN embedding_ids - AND team_id = %(team_id)s - -- must be a completed session - AND min_first_timestamp < now() - INTERVAL 1 DAY - -- let's not load all data for all time - -- will definitely need to do something about this length of time - AND min_first_timestamp > now() - INTERVAL 7 DAY - AND session_id IN replay_with_events - GROUP BY session_id - HAVING dateDiff('second', min(min_first_timestamp), max(max_last_timestamp)) > %(min_duration_include_seconds)s - ORDER BY rand() - LIMIT %(batch_flush_size)s - -- when running locally the offset is used for paging - -- when running in celery the offset is not used - OFFSET %(offset)s - """ - - return [ - x[0] - for x in sync_execute( - query, - { - "team_id": team.pk, - "batch_flush_size": BATCH_FLUSH_SIZE, - "offset": offset, - "min_duration_include_seconds": MIN_DURATION_INCLUDE_SECONDS, - }, - ) - ] diff --git a/ee/session_recordings/ai/embeddings_runner.py b/ee/session_recordings/ai/embeddings_runner.py deleted file mode 100644 index 5125934b60161..0000000000000 --- a/ee/session_recordings/ai/embeddings_runner.py +++ /dev/null @@ -1,297 +0,0 @@ -import json -import tiktoken -import datetime -import pytz - -from typing import Any, Optional - -from abc import ABC, 
abstractmethod -from prometheus_client import Histogram, Counter -from structlog import get_logger -from openai import OpenAI - -from posthog.models import Team -from posthog.clickhouse.client import sync_execute - -from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents -from ee.session_recordings.ai.utils import ( - SessionSummaryPromptData, - simplify_window_id, - format_dates, - collapse_sequence_of_events, - only_pageview_urls, -) - -_encoding: Optional[tiktoken.Encoding] = None - - -def get_encoding() -> tiktoken.Encoding: - global _encoding - if not _encoding: - # NOTE: This does an API request so we want to ensure we load it lazily and not at startup - # tiktoken.encoding_for_model(model_name) specifies encoder - # model_name = "text-embedding-3-small" for this usecase - _encoding = tiktoken.get_encoding("cl100k_base") - return _encoding - - -MAX_TOKENS_FOR_MODEL = 8191 - -RECORDING_EMBEDDING_TOKEN_COUNT = Histogram( - "posthog_session_recordings_recording_embedding_token_count", - "Token count for individual recordings generated during embedding", - buckets=[0, 100, 500, 1000, 2000, 3000, 4000, 5000, 6000, 8000, 10000], - labelnames=["source_type"], -) - -GENERATE_RECORDING_EMBEDDING_TIMING = Histogram( - "posthog_session_recordings_generate_recording_embedding", - "Time spent generating recording embeddings for a single session", - buckets=[0.1, 0.2, 0.3, 0.4, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20], - labelnames=["source_type"], -) - -SESSION_EMBEDDINGS_GENERATED = Counter( - "posthog_session_recordings_embeddings_generated", - "Number of session embeddings generated", - labelnames=["source_type"], -) - -SESSION_EMBEDDINGS_FAILED = Counter( - "posthog_session_recordings_embeddings_failed", - "Instance of an embedding request to open AI (and its surrounding work) failing and being swallowed", - labelnames=["source_type"], -) - -SESSION_EMBEDDINGS_FATAL_FAILED = Counter( - "posthog_session_recordings_embeddings_fatal_failed", - "Instance of the embeddings task failing and raising an exception", - labelnames=["source_type"], -) - -SESSION_EMBEDDINGS_WRITTEN_TO_CLICKHOUSE = Counter( - "posthog_session_recordings_embeddings_written_to_clickhouse", - "Number of session embeddings written to Clickhouse", - labelnames=["source_type"], -) - -SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS = Counter( - "posthog_session_recordings_skipped_when_generating_embeddings", - "Number of sessions skipped when generating embeddings", - labelnames=["source_type", "reason"], -) - -SESSION_EMBEDDINGS_FAILED_TO_CLICKHOUSE = Counter( - "posthog_session_recordings_embeddings_failed_to_clickhouse", - "Number of session embeddings failed to Clickhouse", - labelnames=["source_type"], -) - - -logger = get_logger(__name__) - - -class EmbeddingPreparation(ABC): - source_type: str - - @staticmethod - @abstractmethod - def prepare(item, team) -> tuple[str, str]: - raise NotImplementedError() - - -class SessionEmbeddingsRunner(ABC): - team: Team - openai_client: Any - - def __init__(self, team: Team): - self.team = team - self.openai_client = OpenAI() - - def run(self, items: list[Any], embeddings_preparation: type[EmbeddingPreparation]) -> None: - source_type = embeddings_preparation.source_type - - try: - batched_embeddings = [] - - for item in items: - try: - logger.info( - f"generating embedding input for item", - flow="embeddings", - item=json.dumps(item), - source_type=source_type, - ) - - result = embeddings_preparation.prepare(item, self.team) - - 
if result: - session_id, input = result - - logger.info( - f"generating embedding for item", - flow="embeddings", - session_id=session_id, - source_type=source_type, - ) - - with GENERATE_RECORDING_EMBEDDING_TIMING.labels(source_type=source_type).time(): - embeddings = self._embed(input, source_type=source_type) - - logger.info( - f"generated embedding for item", - flow="embeddings", - session_id=session_id, - source_type=source_type, - ) - - if embeddings: - SESSION_EMBEDDINGS_GENERATED.labels(source_type=source_type).inc() - batched_embeddings.append( - { - "team_id": self.team.pk, - "session_id": session_id, - "embeddings": embeddings, - "source_type": source_type, - "input": input, - } - ) - # we don't want to fail the whole batch if only a single recording fails - except Exception as e: - SESSION_EMBEDDINGS_FAILED.labels(source_type=source_type).inc() - logger.exception( - f"embed individual item error", - flow="embeddings", - error=e, - source_type=source_type, - ) - # so we swallow errors here - - if len(batched_embeddings) > 0: - self._flush_embeddings_to_clickhouse(embeddings=batched_embeddings, source_type=source_type) - except Exception as e: - # but we don't swallow errors within the wider task itself - # if something is failing here then we're most likely having trouble with ClickHouse - SESSION_EMBEDDINGS_FATAL_FAILED.labels(source_type=source_type).inc() - logger.exception(f"embed items fatal error", flow="embeddings", error=e, source_type=source_type) - raise - - def _embed(self, input: str, source_type: str): - token_count = self._num_tokens_for_input(input) - RECORDING_EMBEDDING_TOKEN_COUNT.labels(source_type=source_type).observe(token_count) - if token_count > MAX_TOKENS_FOR_MODEL: - logger.error( - f"embedding input exceeds max token count for model", - flow="embeddings", - input=json.dumps(input), - source_type=source_type, - ) - SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS.labels( - source_type=source_type, reason="token_count_too_high" - ).inc() - return None - - return ( - self.openai_client.embeddings.create( - input=input, - model="text-embedding-3-small", - ) - .data[0] - .embedding - ) - - def _num_tokens_for_input(self, string: str) -> int: - """Returns the number of tokens in a text string.""" - return len(get_encoding().encode(string)) - - def _flush_embeddings_to_clickhouse(self, embeddings: list[dict[str, Any]], source_type: str) -> None: - try: - sync_execute( - "INSERT INTO session_replay_embeddings (session_id, team_id, embeddings, source_type, input) VALUES", - embeddings, - ) - SESSION_EMBEDDINGS_WRITTEN_TO_CLICKHOUSE.labels(source_type=source_type).inc(len(embeddings)) - except Exception as e: - logger.exception(f"flush embeddings error", flow="embeddings", error=e, source_type=source_type) - SESSION_EMBEDDINGS_FAILED_TO_CLICKHOUSE.labels(source_type=source_type).inc(len(embeddings)) - raise - - -class ErrorEmbeddingsPreparation(EmbeddingPreparation): - source_type = "error" - - @staticmethod - def prepare(item: tuple[str, str], _): - session_id = item[0] - error_message = item[1] - return session_id, error_message - - -class SessionEventsEmbeddingsPreparation(EmbeddingPreparation): - source_type = "session" - - @staticmethod - def prepare(session_id: str, team: Team): - eight_days_ago = datetime.datetime.now(pytz.UTC) - datetime.timedelta(days=8) - session_metadata = SessionReplayEvents().get_metadata( - session_id=str(session_id), team=team, recording_start_time=eight_days_ago - ) - if not session_metadata: - logger.error(f"no session metadata found 
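The `_embed` / `_num_tokens_for_input` guard being deleted here worked by counting tokens with tiktoken's `cl100k_base` encoding and skipping any input over the 8191-token limit of `text-embedding-3-small`. A standalone sketch of that check:

```python
import tiktoken

MAX_TOKENS_FOR_MODEL = 8191  # input limit for text-embedding-3-small

_encoding = tiktoken.get_encoding("cl100k_base")


def within_token_budget(text: str) -> bool:
    # mirrors the skip-if-too-long guard: count tokens before calling the embeddings API
    return len(_encoding.encode(text)) <= MAX_TOKENS_FOR_MODEL


print(within_token_budget("a short session summary"))  # True
print(within_token_budget("word " * 20_000))           # False: roughly 20k tokens
```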
for session", flow="embeddings", session_id=session_id) - SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS.labels( - source_type=SessionEventsEmbeddingsPreparation.source_type, reason="metadata_missing" - ).inc() - return None - - session_events = SessionReplayEvents().get_events( - session_id=str(session_id), - team=team, - metadata=session_metadata, - events_to_ignore=[ - "$feature_flag_called", - ], - ) - - if not session_events or not session_events[0] or not session_events[1]: - logger.error(f"no events found for session", flow="embeddings", session_id=session_id) - SESSION_SKIPPED_WHEN_GENERATING_EMBEDDINGS.labels( - source_type=SessionEventsEmbeddingsPreparation.source_type, reason="events_missing" - ).inc() - return None - - processed_sessions = collapse_sequence_of_events( - only_pageview_urls( - format_dates( - simplify_window_id(SessionSummaryPromptData(columns=session_events[0], results=session_events[1])), - start=datetime.datetime(1970, 1, 1, tzinfo=pytz.UTC), # epoch timestamp - ) - ) - ) - - logger.info(f"collapsed events for session", flow="embeddings", session_id=session_id) - - processed_sessions_index = processed_sessions.column_index("event") - current_url_index = processed_sessions.column_index("$current_url") - elements_chain_index = processed_sessions.column_index("elements_chain") - - input = ( - str(session_metadata) - + "\n" - + "\n".join( - SessionEventsEmbeddingsPreparation._compact_result( - event_name=result[processed_sessions_index] if processed_sessions_index is not None else "", - current_url=result[current_url_index] if current_url_index is not None else "", - elements_chain=result[elements_chain_index] if elements_chain_index is not None else "", - ) - for result in processed_sessions.results - ) - ) - - return session_id, input - - @staticmethod - def _compact_result(event_name: str, current_url: int, elements_chain: dict[str, str] | str) -> str: - elements_string = ( - elements_chain if isinstance(elements_chain, str) else ", ".join(str(e) for e in elements_chain) - ) - return f"{event_name} {current_url} {elements_string}" diff --git a/ee/session_recordings/ai/error_clustering.py b/ee/session_recordings/ai/error_clustering.py deleted file mode 100644 index 0e03a755f41e9..0000000000000 --- a/ee/session_recordings/ai/error_clustering.py +++ /dev/null @@ -1,99 +0,0 @@ -from prometheus_client import Histogram -from django.conf import settings -from posthog.clickhouse.client import sync_execute -from posthog.models import Team -from sklearn.cluster import DBSCAN -import pandas as pd -import numpy as np -from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed -from datetime import date - -CLUSTER_REPLAY_ERRORS_TIMING = Histogram( - "posthog_session_recordings_cluster_replay_errors", - "Time spent clustering the embeddings of replay errors", - buckets=[0.5, 1, 2, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60], -) - -CLUSTER_REPLAY_ERRORS_CLUSTER_COUNT = Histogram( - "posthog_session_recordings_errors_cluster_count", - "Count of clusters identified from error messages per team", - buckets=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 25, 30, 35, 40, 45, 50], - labelnames=["team_id"], -) - -DBSCAN_EPS = settings.REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_EPS -DBSCAN_MIN_SAMPLES = settings.REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_MIN_SAMPLES - - -def error_clustering(team: Team): - results = fetch_error_embeddings(team.pk) - - if not results: - return [] - - df = pd.DataFrame(results, columns=["session_id", "error", "embeddings", 
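The error-clustering module deleted below grouped error-message embeddings with scikit-learn's DBSCAN, using the configured `eps` / `min_samples` and treating label `-1` as noise. A toy, self-contained example of the same call pattern (the fake embeddings and parameter values are made up):

```python
import numpy as np
from sklearn.cluster import DBSCAN

rng = np.random.default_rng(0)
# two tight clusters of fake 8-dimensional "embeddings", plus one stray point
embeddings = np.vstack(
    [
        rng.normal(0.0, 0.01, size=(5, 8)),
        rng.normal(1.0, 0.01, size=(5, 8)),
        rng.normal(5.0, 0.01, size=(1, 8)),
    ]
)

labels = DBSCAN(eps=0.5, min_samples=3).fit(embeddings).labels_
print(labels)  # something like [0 0 0 0 0 1 1 1 1 1 -1], where -1 marks an outlier
```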
"timestamp"]) - - df["cluster"] = cluster_embeddings(df["embeddings"].tolist()) - - CLUSTER_REPLAY_ERRORS_CLUSTER_COUNT.labels(team_id=team.pk).observe(df["cluster"].nunique()) - - return construct_response(df, team) - - -def fetch_error_embeddings(team_id: int): - query = """ - SELECT - session_id, input, embeddings, generation_timestamp - FROM - session_replay_embeddings - WHERE - team_id = %(team_id)s - -- don't load all data for all time - AND generation_timestamp > now() - INTERVAL 7 DAY - AND source_type = 'error' - AND input != '' - """ - - return sync_execute( - query, - {"team_id": team_id}, - ) - - -def cluster_embeddings(embeddings): - dbscan = DBSCAN(eps=DBSCAN_EPS, min_samples=DBSCAN_MIN_SAMPLES) - with CLUSTER_REPLAY_ERRORS_TIMING.time(): - dbscan.fit(embeddings) - return dbscan.labels_ - - -def construct_response(df: pd.DataFrame, team: Team): - viewed_session_ids = list( - SessionRecordingViewed.objects.filter(team=team, session_id__in=df["session_id"].unique()) - .values_list("session_id", flat=True) - .distinct() - ) - - clusters = [] - for cluster, rows in df.groupby("cluster"): - session_ids = rows["session_id"].unique() - sample = rows.sample(n=1)[["session_id", "error"]].to_dict("records")[0] - - date_series = ( - rows.groupby([rows["timestamp"].dt.date]) - .size() - .reindex(pd.date_range(end=date.today(), periods=7), fill_value=0) - ) - sparkline = dict(zip(date_series.index.astype(str), date_series)) - clusters.append( - { - "cluster": cluster, - "sample": sample.get("error"), - "session_ids": np.random.choice(session_ids, size=DBSCAN_MIN_SAMPLES - 1), - "occurrences": rows.size, - "sparkline": sparkline, - "unique_sessions": len(session_ids), - "viewed": len(np.intersect1d(session_ids, viewed_session_ids, assume_unique=True)), - } - ) - return clusters diff --git a/ee/session_recordings/ai/similar_recordings.py b/ee/session_recordings/ai/similar_recordings.py deleted file mode 100644 index a267459cc8087..0000000000000 --- a/ee/session_recordings/ai/similar_recordings.py +++ /dev/null @@ -1,57 +0,0 @@ -from prometheus_client import Histogram - -from posthog.clickhouse.client import sync_execute -from posthog.models.team import Team -from posthog.session_recordings.models.session_recording import SessionRecording - -FIND_RECORDING_NEIGHBOURS_TIMING = Histogram( - "posthog_session_recordings_find_recording_neighbours", - "Time spent finding the most similar recording embeddings for a single session", -) - - -def similar_recordings(recording: SessionRecording, team: Team): - with FIND_RECORDING_NEIGHBOURS_TIMING.time(): - similar_embeddings = closest_embeddings(session_id=recording.session_id, team_id=team.pk) - - # TODO: join session recording context (person, duration, etc) to show in frontend - - return similar_embeddings - - -def closest_embeddings(session_id: str, team_id: int): - query = """ - WITH ( - SELECT - argMax(embeddings, generation_timestamp) as target_embeddings - FROM - session_replay_embeddings - WHERE - team_id = %(team_id)s - -- don't load all data for all time - AND generation_timestamp > now() - INTERVAL 7 DAY - AND session_id = %(session_id)s - group by session_id - LIMIT 1 - ) as target_embeddings - SELECT - session_id, - -- distance function choice based on https://help.openai.com/en/articles/6824809-embeddings-frequently-asked-questions - -- OpenAI normalizes embeddings so L2 should produce the same score but is slightly slower - cosineDistance(embeddings, target_embeddings) AS similarity_score - FROM session_replay_embeddings - WHERE - 
team_id = %(team_id)s - -- don't load all data for all time - AND generation_timestamp > now() - INTERVAL 7 DAY - -- skip the target recording - AND session_id != %(session_id)s - ORDER BY similarity_score ASC - -- only return a max number of results - LIMIT %(limit)s; - """ - - return sync_execute( - query, - {"team_id": team_id, "session_id": session_id, "limit": 3}, - ) diff --git a/ee/session_recordings/ai/utils.py b/ee/session_recordings/ai/utils.py index 7345abb3183b0..38ef49cfdb190 100644 --- a/ee/session_recordings/ai/utils.py +++ b/ee/session_recordings/ai/utils.py @@ -3,8 +3,6 @@ from typing import Any -from hashlib import shake_256 - @dataclasses.dataclass class SessionSummaryPromptData: @@ -59,42 +57,6 @@ def simplify_window_id(session_events: SessionSummaryPromptData) -> SessionSumma return dataclasses.replace(session_events, results=simplified_results) -def only_pageview_urls(session_events: SessionSummaryPromptData) -> SessionSummaryPromptData: - """ - including the url with every event is a lot of duplication, - so we remove it from all events except pageviews - """ - if session_events.is_empty(): - return session_events - - # find url column index - url_index = session_events.column_index("$current_url") - event_index = session_events.column_index("event") - - pageview_results = [] - for result in session_events.results: - if url_index is None or event_index is None: - pageview_results.append(result) - continue - - url: str | None = result[url_index] - event: str | None = result[event_index] - if not url: - pageview_results.append(result) - continue - if event == "$pageview": - pageview_results.append(result) - continue - - # otherwise we hash the url, so we have ~one token per event - # this would mean sessions with multiple events that only - # differ by URL should still have some distance between them - result[url_index] = shake_256(url.encode("utf-8")).hexdigest(4) - pageview_results.append(result) - - return dataclasses.replace(session_events, results=pageview_results) - - def deduplicate_urls(session_events: SessionSummaryPromptData) -> SessionSummaryPromptData: if session_events.is_empty(): return session_events diff --git a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr new file mode 100644 index 0000000000000..bcd1ed1e3c8cb --- /dev/null +++ b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr @@ -0,0 +1,1649 @@ +# serializer version: 1 +# name: TestClickhouseSessionRecordingsListFromQuery.test_effect_of_poe_settings_on_query_generated_0_test_poe_v1_still_falls_back_to_person_subquery + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, + max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, + dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), 
toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 50000 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_effect_of_poe_settings_on_query_generated_1_test_poe_being_unavailable_we_fall_back_to_person_id_overrides + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, + max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, + dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM 
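Reading the generated SQL in these snapshots, the nested `plus()` / `divide()` calls behind `activity_score` reduce to a simple ratio. A plain-Python restatement for one aggregated session row (field names follow the snapshot columns; the sample numbers are arbitrary):

```python
def activity_score(
    active_milliseconds: float,
    click_count: int,
    keypress_count: int,
    console_error_count: int,
    mouse_activity_count: int,
    duration_seconds: float,
    console_log_count: int,
    console_warn_count: int,
) -> float:
    # numerator: "active" signals; denominator: all recorded activity plus duration
    numerator = active_milliseconds / 1000 + click_count + keypress_count + console_error_count
    denominator = (
        mouse_activity_count
        + duration_seconds
        + console_error_count
        + console_log_count
        + console_warn_count
    )
    return round(numerator / denominator * 100, 2)


print(activity_score(45_000, 12, 30, 0, 200, 120, 3, 1))  # 26.85
```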
events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 50000 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_effect_of_poe_settings_on_query_generated_2_test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, + max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, + dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 50000 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_effect_of_poe_settings_on_query_generated_3_test_allow_denormalised_props_fix_does_not_stop_all_poe_processing + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, + max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, + dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), 
sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 50000 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_effect_of_poe_settings_on_query_generated_4_test_poe_v2_available_person_properties_are_used_in_replay_listing + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, %(hogql_val_0)s)) AS start_time, + max(toTimeZone(s.max_last_timestamp, %(hogql_val_1)s)) AS end_time, + dateDiff(%(hogql_val_2)s, start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, 
%(hogql_val_11)s), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 50000 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_00_poe_v2_and_materialized_columns_allowed_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + 
max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_01_poe_v2_and_materialized_columns_allowed_without_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS 
mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_02_poe_v2_and_materialized_columns_off_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_03_poe_v2_and_materialized_columns_off_without_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + 
divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_04_poe_off_and_materialized_columns_allowed_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING 
and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_05_poe_off_and_materialized_columns_allowed_without_materialization.1 + ''' + 
SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 
SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_06_poe_off_and_materialized_columns_not_allowed_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + 
divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_07_poe_off_and_materialized_columns_not_allowed_without_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, 
person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_08_poe_v1_and_materialized_columns_allowed_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization + ''' + 
SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_09_poe_v1_and_materialized_columns_allowed_without_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization.1 + ''' + SELECT s.session_id AS session_id, + 
any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_event_filter_with_person_properties_materialized_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_email, ''), 'null'), 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + 
HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_00_poe_v2_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_01_poe_v2_and_materialized_columns_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS 
first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_02_poe_v2_and_materialized_columns_off_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), 
dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_03_poe_v2_and_materialized_columns_off_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.person_id, '00000000-0000-0000-0000-000000000000'), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_04_poe_off_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 
lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_05_poe_off_and_materialized_columns_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 
notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_06_poe_off_and_materialized_columns_not_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_07_poe_off_and_materialized_columns_not_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_08_poe_v1_and_materialized_columns_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 
'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_09_poe_v1_and_materialized_columns_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS 
readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_10_poe_v1_and_not_materialized_columns_not_allowed_with_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + 
format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestClickhouseSessionRecordingsListFromQuery.test_person_id_filter_11_poe_v1_and_not_materialized_columns_not_allowed_without_materialization + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- diff --git a/ee/session_recordings/queries/test/test_session_recording_list_from_query.py b/ee/session_recordings/queries/test/test_session_recording_list_from_query.py new file mode 100644 index 0000000000000..94d54baaf52a2 --- /dev/null +++ b/ee/session_recordings/queries/test/test_session_recording_list_from_query.py @@ -0,0 +1,347 @@ +import re +from itertools import product +from uuid import uuid4 + +from dateutil.relativedelta import relativedelta +from django.utils.timezone import now +from freezegun import freeze_time +from parameterized import parameterized + +from ee.clickhouse.materialized_columns.columns import materialize +from posthog.clickhouse.client import sync_execute +from posthog.hogql.ast import CompareOperation, And, SelectQuery +from posthog.hogql.base import Expr +from posthog.hogql.context import HogQLContext +from posthog.hogql.printer import print_ast +from posthog.models import Person +from posthog.schema import PersonsOnEventsMode, RecordingsQuery +from posthog.session_recordings.queries.session_recording_list_from_query import SessionRecordingListFromQuery +from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + QueryMatchingTest, + snapshot_clickhouse_queries, + _create_event, +) + + +# The HogQL pair of TestClickhouseSessionRecordingsListFromSessionReplay can be renamed when delete the old one +@freeze_time("2021-01-01T13:46:23") +class TestClickhouseSessionRecordingsListFromQuery(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest): + def _print_query(self, query: SelectQuery) -> str: + return print_ast( + query, + HogQLContext(team_id=self.team.pk, enable_select_queries=True), + "clickhouse", + pretty=True, + ) + + def tearDown(self) -> None: + sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL()) + + @property + def base_time(self): + return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0) + + def create_event( + self, + distinct_id, + timestamp, + team=None, + event_name="$pageview", + properties=None, + ): + if team is None: + team = self.team + if properties is None: + properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} + return _create_event( + team=team, + event=event_name, + timestamp=timestamp, + distinct_id=distinct_id, + properties=properties, + ) + + @parameterized.expand( + [ + [ + "test_poe_v1_still_falls_back_to_person_subquery", + True, + False, + False, + PersonsOnEventsMode.PERSON_ID_NO_OVERRIDE_PROPERTIES_ON_EVENTS, + ], + [ + "test_poe_being_unavailable_we_fall_back_to_person_id_overrides", + False, + False, + False, + PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED, + ], + [ + "test_poe_being_unavailable_we_fall_back_to_person_subquery_but_still_use_mat_props", + False, + False, + False, + PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_JOINED, + ], + [ + "test_allow_denormalised_props_fix_does_not_stop_all_poe_processing", + False, + True, + False, + PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS, + ], + [ + "test_poe_v2_available_person_properties_are_used_in_replay_listing", + False, + True, + True, + PersonsOnEventsMode.PERSON_ID_OVERRIDE_PROPERTIES_ON_EVENTS, + ], + ] + ) + def test_effect_of_poe_settings_on_query_generated( + self, + 
_name: str, + poe_v1: bool, + poe_v2: bool, + allow_denormalized_props: bool, + expected_poe_mode: PersonsOnEventsMode, + ) -> None: + with self.settings( + PERSON_ON_EVENTS_OVERRIDE=poe_v1, + PERSON_ON_EVENTS_V2_OVERRIDE=poe_v2, + ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalized_props, + ): + assert self.team.person_on_events_mode == expected_poe_mode + materialize("events", "rgInternal", table_column="person_properties") + + query = RecordingsQuery.model_validate( + { + "properties": [ + { + "key": "rgInternal", + "value": ["false"], + "operator": "exact", + "type": "person", + } + ] + }, + ) + session_recording_list_instance = SessionRecordingListFromQuery( + query=query, team=self.team, hogql_query_modifiers=None + ) + + hogql_parsed_select = session_recording_list_instance.get_query() + printed_query = self._print_query(hogql_parsed_select) + + person_filtering_expr = self._matching_person_filter_expr_from(hogql_parsed_select) + + self._assert_is_events_person_filter(person_filtering_expr) + + if poe_v1 or poe_v2: + # Property used directly from event (from materialized column) + assert "ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null')" in printed_query + else: + # We get the person property value from the persons JOIN + assert re.search( + r"argMax\(replaceRegexpAll\(nullIf\(nullIf\(JSONExtractRaw\(person\.properties, %\(hogql_val_\d+\)s\), ''\), 'null'\), '^\"|\"\$', ''\), person\.version\) AS properties___rgInternal", + printed_query, + ) + # Then we actually filter on that property value + assert re.search( + r"ifNull\(equals\(events__person\.properties___rgInternal, %\(hogql_val_\d+\)s\), 0\)", + printed_query, + ) + self.assertQueryMatchesSnapshot(printed_query) + + def _assert_is_pdi_filter(self, person_filtering_expr: list[Expr]) -> None: + assert person_filtering_expr[0].right.select_from.table.chain == ["person_distinct_ids"] + assert person_filtering_expr[0].right.where.left.chain == ["person", "properties", "rgInternal"] + + def _assert_is_events_person_filter(self, person_filtering_expr: list[Expr]) -> None: + assert person_filtering_expr[0].right.select_from.table.chain == ["events"] + event_person_condition = [ + x + for x in person_filtering_expr[0].right.where.exprs + if isinstance(x, CompareOperation) and x.left.chain == ["person", "properties", "rgInternal"] + ] + assert len(event_person_condition) == 1 + + def _matching_person_filter_expr_from(self, hogql_parsed_select: SelectQuery) -> list[Expr]: + where_conditions: list[Expr] = hogql_parsed_select.where.exprs + ands = [x for x in where_conditions if isinstance(x, And)] + assert len(ands) == 1 + and_comparisons = [x for x in ands[0].exprs if isinstance(x, CompareOperation)] + assert len(and_comparisons) == 1 + assert isinstance(and_comparisons[0].right, SelectQuery) + return and_comparisons + + settings_combinations = [ + ["poe v2 and materialized columns allowed", False, True, True], + ["poe v2 and materialized columns off", False, True, False], + ["poe off and materialized columns allowed", False, False, True], + ["poe off and materialized columns not allowed", False, False, False], + ["poe v1 and materialized columns allowed", True, False, True], + ["poe v1 and not materialized columns not allowed", True, False, False], + ] + + # Options for "materialize person columns" + materialization_options = [ + [" with materialization", True], + [" without materialization", False], + ] + + # Expand the parameter list to the product of all combinations with "materialize person columns" + # e.g. 
[a, b] x [c, d] = [a, c], [a, d], [b, c], [b, d] + test_case_combinations = [ + [f"{name}{mat_option}", poe_v1, poe, mat_columns, mat_person] + for (name, poe_v1, poe, mat_columns), (mat_option, mat_person) in product( + settings_combinations, materialization_options + ) + ] + + @parameterized.expand(test_case_combinations) + @snapshot_clickhouse_queries + def test_event_filter_with_person_properties_materialized( + self, + _name: str, + poe1_enabled: bool, + poe2_enabled: bool, + allow_denormalised_props: bool, + materialize_person_props: bool, + ) -> None: + # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"]) + # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos + # KLUDGE: for materialization on and off to test both sides the way the decorator would have + if materialize_person_props: + materialize("events", "email", table_column="person_properties") + materialize("person", "email") + + with self.settings( + PERSON_ON_EVENTS_OVERRIDE=poe1_enabled, + PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled, + ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props, + ): + user_one = "test_event_filter_with_person_properties-user" + user_two = "test_event_filter_with_person_properties-user2" + session_id_one = f"test_event_filter_with_person_properties-1-{str(uuid4())}" + session_id_two = f"test_event_filter_with_person_properties-2-{str(uuid4())}" + + Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) + Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"}) + + self._add_replay_with_pageview(session_id_one, user_one) + produce_replay_summary( + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=(self.base_time + relativedelta(seconds=30)), + team_id=self.team.id, + ) + self._add_replay_with_pageview(session_id_two, user_two) + produce_replay_summary( + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=(self.base_time + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + match_everyone_filter = RecordingsQuery.model_validate( + {"properties": []}, + ) + + session_recording_list_instance = SessionRecordingListFromQuery( + query=match_everyone_filter, team=self.team, hogql_query_modifiers=None + ) + (session_recordings, _, _) = session_recording_list_instance.run() + + assert sorted([x["session_id"] for x in session_recordings]) == sorted([session_id_one, session_id_two]) + + match_bla_filter = RecordingsQuery.model_validate( + { + "properties": [ + { + "key": "email", + "value": ["bla"], + "operator": "exact", + "type": "person", + } + ] + }, + ) + + session_recording_list_instance = SessionRecordingListFromQuery( + query=match_bla_filter, team=self.team, hogql_query_modifiers=None + ) + (session_recordings, _, _) = session_recording_list_instance.run() + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + def _add_replay_with_pageview(self, session_id: str, user: str) -> None: + self.create_event( + user, + self.base_time, + properties={"$session_id": session_id, "$window_id": str(uuid4())}, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, + ) + + @parameterized.expand(test_case_combinations) + @snapshot_clickhouse_queries + def test_person_id_filter( + self, + _name: str, + poe2_enabled: bool, + poe1_enabled: bool, + 
allow_denormalised_props: bool, + materialize_person_props: bool, + ) -> None: + # KLUDGE: I couldn't figure out how to use @also_test_with_materialized_columns(person_properties=["email"]) + # KLUDGE: and the parameterized.expand decorator at the same time, so we generate test case combos + # KLUDGE: for materialization on and off to test both sides the way the decorator would have + if materialize_person_props: + # it shouldn't matter to this test whether any column is materialized + # but let's keep the tests in this file similar so we flush out any unexpected interactions + materialize("events", "email", table_column="person_properties") + materialize("person", "email") + + with self.settings( + PERSON_ON_EVENTS_OVERRIDE=poe1_enabled, + PERSON_ON_EVENTS_V2_OVERRIDE=poe2_enabled, + ALLOW_DENORMALIZED_PROPS_IN_LISTING=allow_denormalised_props, + ): + three_user_ids = ["person-1-distinct-1", "person-1-distinct-2", "person-2"] + session_id_one = f"test_person_id_filter-session-one" + session_id_two = f"test_person_id_filter-session-two" + session_id_three = f"test_person_id_filter-session-three" + + p = Person.objects.create( + team=self.team, + distinct_ids=[three_user_ids[0], three_user_ids[1]], + properties={"email": "bla"}, + ) + Person.objects.create( + team=self.team, + distinct_ids=[three_user_ids[2]], + properties={"email": "bla2"}, + ) + + self._add_replay_with_pageview(session_id_one, three_user_ids[0]) + self._add_replay_with_pageview(session_id_two, three_user_ids[1]) + self._add_replay_with_pageview(session_id_three, three_user_ids[2]) + + query = RecordingsQuery.model_validate({"person_uuid": str(p.uuid)}) + session_recording_list_instance = SessionRecordingListFromQuery( + query=query, team=self.team, hogql_query_modifiers=None + ) + (session_recordings, _, _) = session_recording_list_instance.run() + assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index 8947e1c270ee4..a3dc50c1228f5 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -34,7 +34,13 @@ ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle, ) -from posthog.session_recordings.session_recording_api import list_recordings_response +from posthog.schema import RecordingsQuery +from posthog.session_recordings.session_recording_api import ( + list_recordings_response, + list_recordings, + query_as_params_to_dict, + list_recordings_from_query, +) from posthog.utils import relative_date_parse logger = structlog.get_logger(__name__) @@ -224,10 +230,19 @@ def recordings(self, request: request.Request, *args: Any, **kwargs: Any) -> res .values_list("recording_id", flat=True) ) - filter = SessionRecordingsFilter(request=request, team=self.team) - filter = filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: json.dumps(playlist_items)}) + use_query_type = (request.GET.get("as_query", "False")).lower() == "true" - return list_recordings_response(filter, request, self.get_serializer_context()) + if use_query_type: + data_dict = query_as_params_to_dict(request.GET.dict()) + query = RecordingsQuery.model_validate(data_dict) + query.session_ids = playlist_items + return list_recordings_response( + list_recordings_from_query(query, request, context=self.get_serializer_context()) + ) + else: + filter = SessionRecordingsFilter(request=request, team=self.team) + filter = 
filter.shallow_clone({SESSION_RECORDINGS_FILTER_IDS: json.dumps(playlist_items)}) + return list_recordings_response(list_recordings(filter, request, context=self.get_serializer_context())) # As of now, you can only "update" a session recording by adding or removing a recording from a static playlist @action( diff --git a/ee/session_recordings/session_summary/__init__.py b/ee/session_recordings/session_summary/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/ee/session_recordings/session_summary/test/__init__.py b/ee/session_recordings/session_summary/test/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/ee/tasks/__init__.py b/ee/tasks/__init__.py index cf44299a71f71..4bc793399424b 100644 --- a/ee/tasks/__init__.py +++ b/ee/tasks/__init__.py @@ -7,12 +7,6 @@ handle_subscription_value_change, schedule_all_subscriptions, ) -from .replay import ( - embed_batch_of_recordings_task, - generate_recordings_embeddings_batch, - generate_replay_embedding_error_clusters, - cluster_replay_error_embeddings, -) # As our EE tasks are not included at startup for Celery, we need to ensure they are declared here so that they are imported by posthog/settings/celery.py @@ -22,8 +16,4 @@ "schedule_all_subscriptions", "deliver_subscription_report", "handle_subscription_value_change", - "embed_batch_of_recordings_task", - "generate_recordings_embeddings_batch", - "generate_replay_embedding_error_clusters", - "cluster_replay_error_embeddings", ] diff --git a/ee/tasks/replay.py b/ee/tasks/replay.py deleted file mode 100644 index fcf57196c2dc5..0000000000000 --- a/ee/tasks/replay.py +++ /dev/null @@ -1,99 +0,0 @@ -from typing import Any - -import structlog -from celery import shared_task - -from ee.session_recordings.ai.embeddings_queries import ( - fetch_errors_by_session_without_embeddings, - fetch_recordings_without_embeddings, -) -from ee.session_recordings.ai.embeddings_runner import ( - SessionEmbeddingsRunner, - ErrorEmbeddingsPreparation, - SessionEventsEmbeddingsPreparation, -) -from ee.session_recordings.ai.error_clustering import error_clustering -from posthog import settings -from posthog.models import Team -from posthog.tasks.utils import CeleryQueue -from django.core.cache import cache - -logger = structlog.get_logger(__name__) - - -# rate limits are per worker, and this task makes multiple calls to open AI -# we currently are allowed 500 calls per minute, so let's rate limit each worker -# to much less than that -@shared_task(ignore_result=False, queue=CeleryQueue.SESSION_REPLAY_EMBEDDINGS.value, rate_limit="75/m") -def embed_batch_of_recordings_task(recordings: list[Any], team_id: int) -> None: - try: - team = Team.objects.get(id=team_id) - runner = SessionEmbeddingsRunner(team=team) - - runner.run(recordings, embeddings_preparation=SessionEventsEmbeddingsPreparation) - - results = fetch_errors_by_session_without_embeddings(team.pk) - runner.run(results, embeddings_preparation=ErrorEmbeddingsPreparation) - except Team.DoesNotExist: - logger.info(f"[embed_batch_of_recordings_task] Team {team} does not exist. Skipping.") - pass - - -@shared_task(ignore_result=True) -def generate_recordings_embeddings_batch() -> None: - # see https://docs.celeryq.dev/en/stable/userguide/canvas.html - # we have three jobs to do here - # 1. get a batch of recordings - # 2. for each recording - ideally in parallel - generate an embedding - # 3. 
update CH with the embeddings in one update operation - # in Celery that's a chain of tasks - # with step 2 being a group of tasks - # chord( - # embed_single_recording.si(recording.session_id, recording.team_id) - # for recording in fetch_recordings_without_embeddings(int(team)) - # )(generate_recordings_embeddings_batch_on_complete.si()) - # but even the docs call out performance impact of synchronising tasks - # - # so, for now, we'll do that naively - - for team_id in settings.REPLAY_EMBEDDINGS_ALLOWED_TEAMS: - try: - recordings = fetch_recordings_without_embeddings(int(team_id)) - logger.info( - f"[generate_recordings_embeddings_batch] Fetched {len(recordings)} recordings", - recordings=recordings, - flow="embeddings", - team_id=team_id, - ) - embed_batch_of_recordings_task.si(recordings, int(team_id)).apply_async() - except Exception as e: - logger.error(f"[generate_recordings_embeddings_batch] Error: {e}.", exc_info=True, error=e) - pass - - -@shared_task(ignore_result=True) -def generate_replay_embedding_error_clusters() -> None: - for team_id in settings.REPLAY_EMBEDDINGS_ALLOWED_TEAMS: - try: - cluster_replay_error_embeddings.si(int(team_id)).apply_async() - except Exception as e: - logger.error(f"[generate_replay_error_clusters] Error: {e}.", exc_info=True, error=e) - pass - - -@shared_task(ignore_result=True, queue=CeleryQueue.SESSION_REPLAY_EMBEDDINGS.value) -def cluster_replay_error_embeddings(team_id: int) -> None: - try: - team = Team.objects.get(id=team_id) - clusters = error_clustering(team) - - cache.set(f"cluster_errors_{team.pk}", clusters, settings.CACHED_RESULTS_TTL) - - logger.info( - f"[generate_replay_error_clusters] Completed for team", - flow="embeddings", - team_id=team_id, - ) - except Team.DoesNotExist: - logger.info(f"[generate_replay_error_clusters] Team {team} does not exist. 
Skipping.") - pass diff --git a/frontend/__snapshots__/components-html-elements-display--editable-display--dark.png b/frontend/__snapshots__/components-html-elements-display--editable-display--dark.png index 1db0b9cfa879c..9026ab8970dca 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--editable-display--dark.png and b/frontend/__snapshots__/components-html-elements-display--editable-display--dark.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--editable-display--light.png b/frontend/__snapshots__/components-html-elements-display--editable-display--light.png index 4173cbeaa918e..1613b8a582275 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--editable-display--light.png and b/frontend/__snapshots__/components-html-elements-display--editable-display--light.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--dark.png b/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--dark.png index 76e461183a861..f810f72a08c38 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--dark.png and b/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--dark.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--light.png b/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--light.png index 40ba5f5bfc0ce..e915d0875abc1 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--light.png and b/frontend/__snapshots__/components-html-elements-display--editable-display-with-preselection--light.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--small-read-only-display--dark.png b/frontend/__snapshots__/components-html-elements-display--small-read-only-display--dark.png new file mode 100644 index 0000000000000..6ee37359aa53d Binary files /dev/null and b/frontend/__snapshots__/components-html-elements-display--small-read-only-display--dark.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--small-read-only-display--light.png b/frontend/__snapshots__/components-html-elements-display--small-read-only-display--light.png new file mode 100644 index 0000000000000..741e349ac329a Binary files /dev/null and b/frontend/__snapshots__/components-html-elements-display--small-read-only-display--light.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--dark.png b/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--dark.png new file mode 100644 index 0000000000000..21c8c0c700b98 Binary files /dev/null and b/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--dark.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--light.png b/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--light.png new file mode 100644 index 0000000000000..447fa7b473966 Binary files /dev/null and b/frontend/__snapshots__/components-html-elements-display--small-with-uniqueness-check--light.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--dark.png 
b/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--dark.png index 0e7cb8823ec3c..21c8c0c700b98 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--dark.png and b/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--dark.png differ diff --git a/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--light.png b/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--light.png index a7dbcb4a0693a..447fa7b473966 100644 Binary files a/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--light.png and b/frontend/__snapshots__/components-html-elements-display--with-uniqueness-check--light.png differ diff --git a/frontend/__snapshots__/components-itemperformanceevent--default--dark.png b/frontend/__snapshots__/components-itemperformanceevent--default--dark.png index 738adb9ae4874..0d632d9aebd7c 100644 Binary files a/frontend/__snapshots__/components-itemperformanceevent--default--dark.png and b/frontend/__snapshots__/components-itemperformanceevent--default--dark.png differ diff --git a/frontend/__snapshots__/components-itemperformanceevent--default--light.png b/frontend/__snapshots__/components-itemperformanceevent--default--light.png index def2f72b7e957..ff64856c25b41 100644 Binary files a/frontend/__snapshots__/components-itemperformanceevent--default--light.png and b/frontend/__snapshots__/components-itemperformanceevent--default--light.png differ diff --git a/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--dark.png b/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--dark.png index e42315417d6a5..8223b1cdc388e 100644 Binary files a/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--dark.png and b/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--dark.png differ diff --git a/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--light.png b/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--light.png index 712bc799ea5b6..a75dbc19890fc 100644 Binary files a/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--light.png and b/frontend/__snapshots__/components-itemperformanceevent--no-performance-observer-captured-data--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--default--dark.png b/frontend/__snapshots__/components-playerinspector-itemevent--default--dark.png index da00c576592b2..f6b293378bdfa 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--default--dark.png and b/frontend/__snapshots__/components-playerinspector-itemevent--default--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--default--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--default--light.png index 5f40cbab56c12..4befb45504e7c 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--default--light.png and b/frontend/__snapshots__/components-playerinspector-itemevent--default--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png 
b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png index 3b8d51e0a588b..8b51f9557e6cf 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png and b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png index ff7b2fed50cf8..ade03b226963c 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png and b/frontend/__snapshots__/components-playerinspector-itemevent--group-identify-event--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--dark.png b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--dark.png index 31e8fe4fe4b99..d029935f201bc 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--dark.png and b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--light.png index d1eece3a24ae4..820708ec8b75b 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--light.png and b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-current-url--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--dark.png b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--dark.png index 7f12ee70567fe..3a560282282e4 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--dark.png and b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--light.png index c872b968a5880..1766d60a0cc06 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--light.png and b/frontend/__snapshots__/components-playerinspector-itemevent--page-view-with-path--light.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--dark.png b/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--dark.png index 564cb99e745f6..b1dd2533af2ad 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--dark.png and b/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--dark.png differ diff --git a/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--light.png b/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--light.png index cff106a904abb..1bb28c6fdabe3 100644 Binary files a/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--light.png and b/frontend/__snapshots__/components-playerinspector-itemevent--web-vitals-event--light.png differ diff --git 
a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png index fbed47ce7ca51..5db2152719be9 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index b9f2d52563e97..0aad92c68ce5f 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png index eff54e9dbb3cd..f998a067b9bb7 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png index 437ff67116e58..bdc7b702121e2 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--dark.png index ad471b379e81d..ba1a8e29cda5e 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--light.png index bf607bc414337..198a089bd0ce5 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page-iff-legacy-sources--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--dark.png index eff54e9dbb3cd..f998a067b9bb7 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--light.png index 437ff67116e58..bdc7b702121e2 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-overview-page--light.png differ diff --git a/frontend/__snapshots__/scenes-other-products--products--dark.png b/frontend/__snapshots__/scenes-other-products--products--dark.png index df94ff5a60981..caa1dcb27f4f8 100644 Binary files a/frontend/__snapshots__/scenes-other-products--products--dark.png and 
b/frontend/__snapshots__/scenes-other-products--products--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-products--products--light.png b/frontend/__snapshots__/scenes-other-products--products--light.png index 44c254800cedd..0715bb4784947 100644 Binary files a/frontend/__snapshots__/scenes-other-products--products--light.png and b/frontend/__snapshots__/scenes-other-products--products--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png index eba5db3e8447b..eb8542bfdcfcc 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png index d12a21abba064..999b15cfdd917 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png 
b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png index 61e1ae4455029..23e20084b753d 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png index 6d2dbfc7b2604..f7b7f6ebd06e6 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png index 3f4df5f5aef57..7da712a894a4c 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png index 5feb120039c45..cadfa35ff0a63 100644 Binary files a/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-web-vitals--light.png differ diff --git a/frontend/public/services/pineapple.png b/frontend/public/services/pineapple.png new file mode 100644 index 0000000000000..3fd475fb500d2 Binary files /dev/null and b/frontend/public/services/pineapple.png differ diff --git a/frontend/src/initKea.ts b/frontend/src/initKea.ts index d3051cb01a06d..99a00f2e255d4 100644 --- a/frontend/src/initKea.ts +++ b/frontend/src/initKea.ts @@ -64,7 +64,7 @@ export const loggerPlugin: () => KeaPlugin = () => ({ export function initKea({ routerHistory, routerLocation, beforePlugins }: InitKeaProps = {}): void { const plugins = [ ...(beforePlugins || []), - localStoragePlugin, + localStoragePlugin(), windowValuesPlugin({ window: window }), routerPlugin({ history: routerHistory, diff --git a/frontend/src/layout/navigation-3000/components/Navbar.tsx b/frontend/src/layout/navigation-3000/components/Navbar.tsx index c0c641227828d..e287b2e06220e 100644 --- a/frontend/src/layout/navigation-3000/components/Navbar.tsx +++ b/frontend/src/layout/navigation-3000/components/Navbar.tsx @@ -59,7 +59,7 @@ export function Navbar(): JSX.Element { ? () => { if ( activeNavbarItemId === item.identifier && - isSidebarShown + !isSidebarShown ) { hideSidebar() } else { diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index 4a81a00349ca9..be44236c6dc9b 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -512,7 +512,7 @@ export const navigation3000Logic = kea([ featureFlags[FEATURE_FLAGS.SQL_EDITOR] ? 
{ identifier: Scene.SQLEditor, - label: 'Data warehouse', + label: 'Data warehouse 3000', icon: , to: urls.sqlEditor(), logic: editorSidebarLogic, diff --git a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx index 82eef61c1ae6e..66d480491172a 100644 --- a/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx +++ b/frontend/src/layout/navigation-3000/sidebars/featureFlags.tsx @@ -7,9 +7,9 @@ import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { featureFlagLogic } from 'scenes/feature-flags/featureFlagLogic' import { groupFilters } from 'scenes/feature-flags/FeatureFlags' import { featureFlagsLogic } from 'scenes/feature-flags/featureFlagsLogic' +import { projectLogic } from 'scenes/projectLogic' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' -import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { groupsModel } from '~/models/groupsModel' @@ -35,8 +35,8 @@ export const featureFlagsSidebarLogic = kea([ values: [ featureFlagsLogic, ['featureFlags', 'featureFlagsLoading'], - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], sceneLogic, ['activeScene', 'sceneParams'], groupsModel, @@ -46,8 +46,8 @@ export const featureFlagsSidebarLogic = kea([ }), selectors(({ actions }) => ({ contents: [ - (s) => [s.relevantFeatureFlags, s.featureFlagsLoading, s.currentTeamId, s.aggregationLabel], - (relevantFeatureFlags, featureFlagsLoading, currentTeamId, aggregationLabel) => [ + (s) => [s.relevantFeatureFlags, s.featureFlagsLoading, s.currentProjectId, s.aggregationLabel], + (relevantFeatureFlags, featureFlagsLoading, currentProjectId, aggregationLabel) => [ { key: 'feature-flags', noun: 'feature flag', @@ -147,7 +147,7 @@ export const featureFlagsSidebarLogic = kea([ label: 'Delete feature flag', onClick: () => { void deleteWithUndo({ - endpoint: `projects/${currentTeamId}/feature_flags`, + endpoint: `projects/${currentProjectId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: () => { actions.loadFeatureFlags() diff --git a/frontend/src/layout/navigation-3000/sidebars/insights.ts b/frontend/src/layout/navigation-3000/sidebars/insights.ts index c94b3df3ab23a..786d03def03e6 100644 --- a/frontend/src/layout/navigation-3000/sidebars/insights.ts +++ b/frontend/src/layout/navigation-3000/sidebars/insights.ts @@ -2,10 +2,10 @@ import { afterMount, connect, kea, listeners, path, reducers, selectors } from ' import { subscriptions } from 'kea-subscriptions' import { deleteInsightWithUndo } from 'lib/utils/deleteWithUndo' import { insightsApi } from 'scenes/insights/utils/api' +import { projectLogic } from 'scenes/projectLogic' import { INSIGHTS_PER_PAGE, savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' -import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { navigation3000Logic } from '~/layout/navigation-3000/navigationLogic' @@ -46,8 +46,8 @@ export const insightsSidebarLogic = kea([ })), selectors(({ actions, values, cache }) => ({ contents: [ - (s) => [s.insights, s.infiniteInsights, s.insightsLoading, teamLogic.selectors.currentTeamId], - (insights, infiniteInsights, insightsLoading, currentTeamId) => [ + (s) => [s.insights, s.infiniteInsights, s.insightsLoading, projectLogic.selectors.currentProjectId], + (insights, infiniteInsights, 
insightsLoading, currentProjectId) => [ { key: 'insights', noun: 'insight', @@ -92,7 +92,7 @@ export const insightsSidebarLogic = kea([ onClick: () => { void deleteInsightWithUndo({ object: insight, - endpoint: `projects/${currentTeamId}/insights`, + endpoint: `projects/${currentProjectId}/insights`, callback: actions.loadInsights, }) }, diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx index e2a235881da6a..69554124d4fbf 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/activity/sidePanelActivityLogic.tsx @@ -8,7 +8,7 @@ import { dayjs } from 'lib/dayjs' import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown' import { toParams } from 'lib/utils' import posthog from 'posthog-js' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { ActivityFilters, activityForSceneLogic } from './activityForSceneLogic' import type { sidePanelActivityLogicType } from './sidePanelActivityLogicType' @@ -34,7 +34,7 @@ export enum SidePanelActivityTab { export const sidePanelActivityLogic = kea([ path(['scenes', 'navigation', 'sidepanel', 'sidePanelActivityLogic']), connect({ - values: [activityForSceneLogic, ['sceneActivityFilters']], + values: [activityForSceneLogic, ['sceneActivityFilters'], projectLogic, ['currentProjectId']], }), actions({ togglePolling: (pageIsVisible: boolean) => ({ pageIsVisible }), @@ -104,7 +104,7 @@ export const sidePanelActivityLogic = kea([ } await api.create( - `api/projects/${teamLogic.values.currentTeamId}/activity_log/bookmark_activity_notification`, + `api/projects/${values.currentProjectId}/activity_log/bookmark_activity_notification`, { bookmark: latestNotification.created_at.toISOString(), } @@ -123,7 +123,7 @@ export const sidePanelActivityLogic = kea([ try { const response = await api.get( - `api/projects/${teamLogic.values.currentTeamId}/activity_log/important_changes?` + + `api/projects/${values.currentProjectId}/activity_log/important_changes?` + toParams({ unread: onlyUnread }) ) diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 4c10f7c0660e5..7be5df3d764d6 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -41,7 +41,6 @@ import { DataWarehouseTable, DataWarehouseViewLink, EarlyAccessFeatureType, - ErrorClusterResponse, EventDefinition, EventDefinitionType, EventsListQueryParams, @@ -1765,11 +1764,17 @@ const api = { }, }, hogFunctions: { - async list(params?: { - filters?: any - type?: HogFunctionTypeType - }): Promise> { - return await new ApiRequest().hogFunctions().withQueryString(params).get() + async list( + filters?: any, + type?: HogFunctionTypeType | HogFunctionTypeType[] + ): Promise> { + return await new ApiRequest() + .hogFunctions() + .withQueryString({ + filters: filters, + ...(type ? (Array.isArray(type) ? 
{ types: type.join(',') } : { type }) : {}), + }) + .get() }, async get(id: HogFunctionType['id']): Promise { return await new ApiRequest().hogFunction(id).get() @@ -1798,10 +1803,12 @@ const api = { ): Promise { return await new ApiRequest().hogFunction(id).withAction('metrics/totals').withQueryString(params).get() }, - async listTemplates(type?: HogFunctionTypeType): Promise> { + async listTemplates( + type?: HogFunctionTypeType | HogFunctionTypeType[] + ): Promise> { return new ApiRequest() .hogFunctionTemplates() - .withQueryString({ type: type ?? 'destination' }) + .withQueryString(Array.isArray(type) ? { types: type.join(',') } : { type: type ?? 'destination' }) .get() }, async getTemplate(id: HogFunctionTemplateType['id']): Promise { @@ -1931,10 +1938,6 @@ const api = { return await new ApiRequest().recording(recordingId).withAction('similar_sessions').get() }, - async errorClusters(refresh?: boolean): Promise { - return await new ApiRequest().recordings().withAction('error_clusters').withQueryString({ refresh }).get() - }, - async delete(recordingId: SessionRecordingType['id']): Promise<{ success: boolean }> { return await new ApiRequest().recording(recordingId).delete() }, @@ -2230,7 +2233,7 @@ const api = { async get(viewId: DataWarehouseSavedQuery['id']): Promise { return await new ApiRequest().dataWarehouseSavedQuery(viewId).get() }, - async create(data: Partial): Promise { + async create(data: Partial & { types: string[][] }): Promise { return await new ApiRequest().dataWarehouseSavedQueries().create({ data }) }, async delete(viewId: DataWarehouseSavedQuery['id']): Promise { @@ -2238,7 +2241,7 @@ const api = { }, async update( viewId: DataWarehouseSavedQuery['id'], - data: Partial + data: Partial & { types: string[][] } ): Promise { return await new ApiRequest().dataWarehouseSavedQuery(viewId).update({ data }) }, @@ -2356,7 +2359,12 @@ const api = { viewId: DataWarehouseViewLink['id'], data: Pick< DataWarehouseViewLink, - 'source_table_name' | 'source_table_key' | 'joining_table_name' | 'joining_table_key' | 'field_name' + | 'source_table_name' + | 'source_table_key' + | 'joining_table_name' + | 'joining_table_key' + | 'field_name' + | 'configuration' > ): Promise { return await new ApiRequest().dataWarehouseViewLink(viewId).update({ data }) diff --git a/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts b/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts index 9781c1e8e2bbd..c85c57d67a883 100644 --- a/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts +++ b/frontend/src/lib/components/DefinitionPopover/definitionPopoverLogic.ts @@ -134,9 +134,11 @@ export const definitionPopoverLogic = kea([ } if (!('distinct_id_field' in item)) { - const idField = Object.values(warehouseItem.fields).find((n) => n.name === 'id') - if (idField) { - warehouseItem['distinct_id_field'] = idField.name + const distinctIdField = + Object.values(warehouseItem.fields).find((n) => n.name === 'distinct_id') ?? 
+ Object.values(warehouseItem.fields).find((n) => n.name === 'id') + if (distinctIdField) { + warehouseItem['distinct_id_field'] = distinctIdField.name } } diff --git a/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.stories.tsx b/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.stories.tsx index bb26f35d20a31..7cbfdd5a0a28c 100644 --- a/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.stories.tsx +++ b/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.stories.tsx @@ -181,6 +181,10 @@ export function ReadOnlyDisplay(): JSX.Element { return } +export function SmallReadOnlyDisplay(): JSX.Element { + return +} + export function WithoutCentralHighlightDisplay(): JSX.Element { return } @@ -212,3 +216,15 @@ export function EditableDisplayWithPreselection(): JSX.Element { export function WithUniquenessCheck(): JSX.Element { return } + +export function SmallWithUniquenessCheck(): JSX.Element { + return ( + + ) +} diff --git a/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.tsx b/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.tsx index 4e2c0b4c8809b..c00bca5b711d1 100644 --- a/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.tsx +++ b/frontend/src/lib/components/HTMLElementsDisplay/HTMLElementsDisplay.tsx @@ -1,3 +1,4 @@ +import clsx from 'clsx' import { useActions, useValues } from 'kea' import { htmlElementsDisplayLogic } from 'lib/components/HTMLElementsDisplay/htmlElementsDisplayLogic' import { ParsedCSSSelector } from 'lib/components/HTMLElementsDisplay/preselectWithCSS' @@ -13,7 +14,13 @@ function indent(level: number): string { return Array(level).fill(' ').join('') } -function CloseAllTags({ elements }: { elements: ElementType[] }): JSX.Element { +function CloseAllTags({ + elements, + size = 'small', +}: { + elements: ElementType[] + size?: 'small' | 'xsmall' +}): JSX.Element { return ( <> {[...elements] @@ -28,7 +35,10 @@ function CloseAllTags({ elements }: { elements: ElementType[] }): JSX.Element { }} >
                             {indent(elements.length - index - 2)}
@@ -47,6 +57,7 @@ function Tags({
     editable,
     onChange,
     selectedText,
+    size = 'small',
 }: {
     elements: ElementType[]
     parsedCSSSelectors: Record
@@ -54,6 +65,7 @@ function Tags({
     editable: boolean
     onChange: (i: number, s: ParsedCSSSelector) => void
     selectedText?: string
+    size?: 'small' | 'xsmall'
 }): JSX.Element {
     return (
         <>
@@ -78,6 +90,7 @@ function Tags({
                             highlight={highlight}
                             parsedCSSSelector={parsedCSSSelectors[index]}
                             selectedText={selectedText}
+                            size={size}
                         />
                     
                 )
@@ -92,6 +105,7 @@ interface HTMLElementsDisplayPropsBase {
     elements: ElementType[]
     highlight?: boolean
     selectedText?: string
+    size?: 'small' | 'xsmall'
 }
 
 type HTMLElementsDisplayProps =
@@ -119,6 +133,7 @@ export function HTMLElementsDisplay({
     highlight = true,
     editable = false,
     checkUniqueness = false,
+    size = 'small',
 }: HTMLElementsDisplayProps): JSX.Element {
     const [key] = useState(() => `HtmlElementsDisplay.${uniqueNode++}`)
 
@@ -137,12 +152,12 @@ export function HTMLElementsDisplay({
     const { setParsedSelectors, showAdditionalElements } = useActions(logic)
 
     return (
-        
+
{editable && !!parsedElements.length && (
Selector:
-
{chosenSelector}
+
{chosenSelector}
)} @@ -161,7 +176,10 @@ export function HTMLElementsDisplay({ <> {elementsToShowDepth ? (
@@ -177,8 +195,9 @@ export function HTMLElementsDisplay({
                             parsedCSSSelectors={parsedSelectors}
                             onChange={(index, s) => setParsedSelectors({ ...parsedSelectors, [index]: s })}
                             selectedText={selectedText}
+                            size={size}
                         />
-                        <CloseAllTags elements={elements} />
+                        <CloseAllTags elements={elements} size={size} />
                     
                 ) : (
                     
No elements to display
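Not part of the diff: a minimal usage sketch of the size prop introduced above, assuming HTMLElementsDisplay keeps its existing named export and that ElementType comes from ~/types as elsewhere in the frontend.

```tsx
import { HTMLElementsDisplay } from 'lib/components/HTMLElementsDisplay/HTMLElementsDisplay'

import { ElementType } from '~/types'

// Read-only, compact rendering of an autocaptured element chain.
// `size` defaults to 'small'; 'xsmall' is the new compact variant.
export function CompactElementChain({ elements }: { elements: ElementType[] }): JSX.Element {
    return <HTMLElementsDisplay elements={elements} size="xsmall" />
}
```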
diff --git a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.tsx b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.tsx index 53a0fcb00d839..d5ce773e17b23 100644 --- a/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.tsx +++ b/frontend/src/lib/components/HTMLElementsDisplay/SelectableElement.tsx @@ -193,6 +193,7 @@ export function SelectableElement({ highlight, parsedCSSSelector, selectedText, + size = 'small', }: { element: ElementType isDeepestChild: boolean @@ -202,6 +203,7 @@ export function SelectableElement({ highlight?: boolean parsedCSSSelector?: ParsedCSSSelector selectedText?: string + size?: 'small' | 'xsmall' }): JSX.Element { const setParsedCSSSelector = (newParsedCSSSelector: ParsedCSSSelector): void => { if (!objectsEqual(newParsedCSSSelector, parsedCSSSelector)) { @@ -212,8 +214,9 @@ export function SelectableElement({ return (
             {indent}
diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts
index 75086da6c1cfe..a9389640cbeb6 100644
--- a/frontend/src/lib/components/Support/supportLogic.ts
+++ b/frontend/src/lib/components/Support/supportLogic.ts
@@ -143,6 +143,11 @@ export const TARGET_AREA_TO_NAME = [
                 'data-attr': `support-form-target-area-data_warehouse`,
                 label: 'Data warehouse',
             },
+            {
+                value: 'batch_exports',
+                'data-attr': `support-form-target-area-batch-exports`,
+                label: 'Batch exports',
+            },
             {
                 value: 'feature_flags',
                 'data-attr': `support-form-target-area-feature_flags`,
diff --git a/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx b/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx
index 117c6b678c59e..a253805e75260 100644
--- a/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx
+++ b/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx
@@ -3,6 +3,7 @@ import { LemonButton, LemonButtonProps, LemonDropdown, Popover } from '@posthog/
 import { BindLogic, useActions, useValues } from 'kea'
 import { useState } from 'react'
 
+import { AnyDataNode } from '~/queries/schema'
 import { UniversalFiltersGroup, UniversalFilterValue } from '~/types'
 
 import { TaxonomicPropertyFilter } from '../PropertyFilters/components/TaxonomicPropertyFilter'
@@ -75,12 +76,14 @@ const Value = ({
     onChange,
     onRemove,
     initiallyOpen = false,
+    metadataSource,
 }: {
     index: number
     filter: UniversalFilterValue
     onChange: (property: UniversalFilterValue) => void
     onRemove: () => void
     initiallyOpen?: boolean
+    metadataSource?: AnyDataNode
 }): JSX.Element => {
     const { rootKey, taxonomicPropertyFilterGroupTypes } = useValues(universalFiltersLogic)
 
@@ -103,6 +106,7 @@ const Value = ({
                         onChange={(properties) => onChange({ ...filter, properties })}
                         disablePopover
                         taxonomicGroupTypes={[TaxonomicFilterGroupType.EventProperties]}
+                        metadataSource={metadataSource}
                     />
                 ) : isEditable ? (
                     ([
                 newValues.push(newFeatureFlagFilter)
             } else {
                 const propertyType =
-                    item.propertyFilterType ?? taxonomicFilterTypeToPropertyFilterType(taxonomicGroup.type)
+                    item?.propertyFilterType ?? taxonomicFilterTypeToPropertyFilterType(taxonomicGroup.type)
                 if (propertyKey && propertyType) {
                     const newPropertyFilter = createDefaultPropertyFilter(
                         {},
diff --git a/frontend/src/lib/components/ViewRecordingButton.tsx b/frontend/src/lib/components/ViewRecordingButton.tsx
new file mode 100644
index 0000000000000..1d0c7adb1d4b0
--- /dev/null
+++ b/frontend/src/lib/components/ViewRecordingButton.tsx
@@ -0,0 +1,41 @@
+import { LemonButton, LemonButtonProps } from '@posthog/lemon-ui'
+import { useActions } from 'kea'
+import { Dayjs, dayjs } from 'lib/dayjs'
+import { IconPlayCircle } from 'lib/lemon-ui/icons'
+import { sessionPlayerModalLogic } from 'scenes/session-recordings/player/modal/sessionPlayerModalLogic'
+import { urls } from 'scenes/urls'
+
+import { EventType } from '~/types'
+
+export default function ViewRecordingButton({
+    sessionId,
+    timestamp,
+    ...props
+}: Pick & {
+    sessionId: string
+    timestamp?: string | Dayjs
+}): JSX.Element {
+    const { openSessionPlayer } = useActions(sessionPlayerModalLogic)
+
+    return (
+         {
+                const fiveSecondsBeforeEvent = dayjs(timestamp).valueOf() - 5000
+                openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0))
+            }}
+            sideIcon={<IconPlayCircle />}
+            {...props}
+        >
+            View recording
+        </LemonButton>
+    )
+}
+
+export const mightHaveRecording = (properties: EventType['properties']): boolean => {
+    return properties.$session_id
+        ? properties.$recording_status
+            ? properties.$recording_status === 'active'
+            : true
+        : false
+}
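Not part of the diff: a small sketch of how the new component and mightHaveRecording fit together, in the spirit of the EventRowActions change further down; the wrapper component name is hypothetical.

```tsx
import ViewRecordingButton, { mightHaveRecording } from 'lib/components/ViewRecordingButton'

import { EventType } from '~/types'

// Only offer the button when the event's properties suggest a recording exists.
export function MaybeViewRecording({ event }: { event: EventType }): JSX.Element | null {
    if (!mightHaveRecording(event.properties)) {
        return null
    }
    // Opens the session player roughly five seconds before the event, per the onClick above.
    return <ViewRecordingButton sessionId={event.properties.$session_id} timestamp={event.timestamp} />
}
```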
diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx
index 3a0c3436faa9f..60e6358245b81 100644
--- a/frontend/src/lib/constants.tsx
+++ b/frontend/src/lib/constants.tsx
@@ -179,10 +179,8 @@ export const FEATURE_FLAGS = {
     PRODUCT_INTRO_PAGES: 'product-intro-pages', // owner: @raquelmsmith
     SQL_EDITOR: 'sql-editor', // owner: @EDsCODE #team-data-warehouse
     SESSION_REPLAY_DOCTOR: 'session-replay-doctor', // owner: #team-replay
-    REPLAY_SIMILAR_RECORDINGS: 'session-replay-similar-recordings', // owner: #team-replay
     SAVED_NOT_PINNED: 'saved-not-pinned', // owner: #team-replay
     NEW_EXPERIMENTS_UI: 'new-experiments-ui', // owner: @jurajmajerik #team-feature-success
-    REPLAY_ERROR_CLUSTERING: 'session-replay-error-clustering', // owner: #team-replay
     AUDIT_LOGS_ACCESS: 'audit-logs-access', // owner: #team-growth
     SUBSCRIBE_FROM_PAYGATE: 'subscribe-from-paygate', // owner: #team-growth
     HEATMAPS_UI: 'heatmaps-ui', // owner: @benjackwhite
@@ -225,15 +223,18 @@ export const FEATURE_FLAGS = {
     MESSAGING: 'messaging', // owner @mariusandra #team-cdp
     SESSION_REPLAY_URL_BLOCKLIST: 'session-replay-url-blocklist', // owner: @richard-better #team-replay
     BILLING_TRIAL_FLOW: 'billing-trial-flow', // owner: @zach
-    DEAD_CLICKS_AUTOCAPTURE: 'dead-clicks-autocapture', // owner: @pauldambra #team-replay
     EDIT_DWH_SOURCE_CONFIG: 'edit_dwh_source_config', // owner: @Gilbert09 #team-data-warehouse
     AI_SURVEY_RESPONSE_SUMMARY: 'ai-survey-response-summary', // owner: @pauldambra
     CUSTOM_CHANNEL_TYPE_RULES: 'custom-channel-type-rules', // owner: @robbie-c #team-web-analytics
     SELF_SERVE_CREDIT_OVERRIDE: 'self-serve-credit-override', // owner: @zach
-    EXPERIMENTS_MIGRATION_DISABLE_UI: 'experiments-migration-disable-ui', // owner: @jurajmajerik #team-experiments
     CUSTOM_CSS_THEMES: 'custom-css-themes', // owner: @daibhin
+    EXPERIMENTS_MULTIPLE_METRICS: 'experiments-multiple-metrics', // owner: @jurajmajerik #team-experiments
     WEB_ANALYTICS_WARN_CUSTOM_EVENT_NO_SESSION: 'web-analytics-warn-custom-event-no-session', // owner: @robbie-c #team-web-analytics
     TWO_FACTOR_UI: 'two-factor-ui', // owner: @zach
+    SITE_DESTINATIONS: 'site-destinations', // owner: @mariusandra #team-cdp
+    SITE_APP_FUNCTIONS: 'site-app-functions', // owner: @mariusandra #team-cdp
+    REPLAY_HOGQL_FILTERS: 'replay-hogql-filters', // owner: @pauldambra #team-replay
+    REPLAY_LIST_RECORDINGS_AS_QUERY: 'replay-list-recordings-as-query', // owner: @pauldambra #team-replay
 } as const
 export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS]
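Not part of the diff: for orientation, flag keys like the ones added above are typically read via the featureFlags[FEATURE_FLAGS.X] pattern already visible in navigationLogic.tsx in this PR; the featureFlagLogic import path and the hook name are assumptions.

```tsx
import { useValues } from 'kea'
import { FEATURE_FLAGS } from 'lib/constants'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'

// Gate a UI branch on one of the newly added keys (hook name is illustrative only).
export function useSiteDestinationsEnabled(): boolean {
    const { featureFlags } = useValues(featureFlagLogic)
    return !!featureFlags[FEATURE_FLAGS.SITE_DESTINATIONS]
}
```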
 
diff --git a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx
index 3129a068d64c9..16b6a2bc7830f 100644
--- a/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx
+++ b/frontend/src/lib/lemon-ui/LemonInputSelect/LemonInputSelect.tsx
@@ -249,22 +249,25 @@ export function LemonInputSelect({
     }
 
     const _onBlur = (): void => {
-        // We need to add a delay as a click could be in the popover or the input wrapper which refocuses
-        setTimeout(() => {
-            if (popoverFocusRef.current) {
-                popoverFocusRef.current = false
-                inputRef.current?.focus()
-                _onFocus()
-                return
-            }
-            if (allowCustomValues && inputValue.trim() && !values.includes(inputValue)) {
+        const hasSelectedAutofilledValue = selectedIndex > 0
+        const hasCustomValue =
+            !hasSelectedAutofilledValue && allowCustomValues && inputValue.trim() && !values.includes(inputValue)
+        if (popoverFocusRef.current) {
+            popoverFocusRef.current = false
+            inputRef.current?.focus()
+            _onFocus()
+            if (hasCustomValue) {
                 _onActionItem(inputValue.trim(), null)
-            } else {
-                setInputValue('')
             }
-            setShowPopover(false)
-            onBlur?.()
-        }, 100)
+            return
+        }
+        if (hasCustomValue) {
+            _onActionItem(inputValue.trim(), null)
+        } else {
+            setInputValue('')
+        }
+        setShowPopover(false)
+        onBlur?.()
     }
 
     const _onFocus = (): void => {
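Not part of the diff: a minimal sketch of the interaction the reworked _onBlur affects, assuming the existing LemonInputSelect public props (mode, allowCustomValues, options, value, onChange).

```tsx
import { useState } from 'react'

import { LemonInputSelect } from 'lib/lemon-ui/LemonInputSelect/LemonInputSelect'

// With allowCustomValues, typing a new entry and clicking away now commits it synchronously
// (no 100 ms timeout), unless an autofilled option was selected from the popover.
export function TagsInput(): JSX.Element {
    const [tags, setTags] = useState<string[]>([])
    return (
        <LemonInputSelect
            mode="multiple"
            allowCustomValues
            value={tags}
            onChange={setTags}
            options={tags.map((t) => ({ key: t, label: t }))}
        />
    )
}
```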
diff --git a/frontend/src/lib/lemon-ui/Tooltip/Tooltip.tsx b/frontend/src/lib/lemon-ui/Tooltip/Tooltip.tsx
index eb3e95542cc82..a9c36d95d6ad8 100644
--- a/frontend/src/lib/lemon-ui/Tooltip/Tooltip.tsx
+++ b/frontend/src/lib/lemon-ui/Tooltip/Tooltip.tsx
@@ -32,6 +32,7 @@ export interface TooltipProps {
     placement?: Placement
     className?: string
     visible?: boolean
+    interactive?: boolean
 }
 
 export function Tooltip({
@@ -42,14 +43,16 @@ export function Tooltip({
     offset = 8,
     arrowOffset,
     delayMs = 500,
-    closeDelayMs = 0, // Set this to some delay to ensure the content stays open when hovered
+    closeDelayMs = 100, // Slight delay to ensure smooth transition
+    interactive = false,
     visible: controlledOpen,
 }: TooltipProps): JSX.Element {
     const [uncontrolledOpen, setUncontrolledOpen] = useState(false)
+    const [isHoveringTooltip, setIsHoveringTooltip] = useState(false) // Track tooltip hover state
     const caretRef = useRef(null)
     const floatingContainer = useFloatingContainer()
 
-    const open = controlledOpen ?? uncontrolledOpen
+    const open = controlledOpen ?? (uncontrolledOpen || isHoveringTooltip)
 
     const { context, refs } = useFloating({
         placement,
@@ -116,7 +119,10 @@ export function Tooltip({
                         className="Tooltip max-w-sm"
                         // eslint-disable-next-line react/forbid-dom-props
                         style={{ ...context.floatingStyles }}
-                        {...getFloatingProps()}
+                        {...getFloatingProps({
+                            onMouseEnter: () => interactive && setIsHoveringTooltip(true), // Keep tooltip open
+                            onMouseLeave: () => interactive && setIsHoveringTooltip(false), // Allow closing
+                        })}
                     >
                         
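Not part of the diff: a sketch of the new interactive behaviour, using the Tooltip import path that appears elsewhere in this PR; the example content is illustrative.

```tsx
import { Tooltip } from 'lib/lemon-ui/Tooltip'

// With `interactive`, hovering the tooltip body keeps it open (via the onMouseEnter/onMouseLeave
// handlers added above), so the link inside stays clickable.
export function DocsHint(): JSX.Element {
    return (
        <Tooltip interactive title={<a href="https://posthog.com/docs">Read the docs</a>}>
            <span>Hover me</span>
        </Tooltip>
    )
}
```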
- Remaining rate limit tokens for the posthog-js library client-side rate limiting implementation. - - ), + description: + 'Remaining rate limit tokens for the posthog-js library client-side rate limiting implementation.', examples: ['100'], }, token: { label: 'Token', - description: Token used for authentication., + description: 'Token used for authentication.', examples: ['ph_abcdefg'], }, $ce_version: { @@ -335,12 +332,12 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { // session recording $replay_minimum_duration: { label: 'Replay config - minimum duration', - description: Config for minimum duration before emitting a session recording., + description: 'Config for minimum duration before emitting a session recording.', examples: ['1000'], }, $replay_sample_rate: { label: 'Replay config - sample rate', - description: Config for sampling rate of session recordings., + description: 'Config for sampling rate of session recordings.', examples: ['0.1'], }, $console_log_recording_enabled_server_side: { @@ -356,40 +353,39 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { }, $session_recording_start_reason: { label: 'Session recording start reason', - description: ( - - Reason for starting the session recording. Useful for e.g. if you have sampling enabled and want to - see on batch exported events which sessions have recordings available. - - ), + description: + 'Reason for starting the session recording. Useful for e.g. if you have sampling enabled and want to see on batch exported events which sessions have recordings available.', examples: ['sampling_override', 'recording_initialized', 'linked_flag_match'], }, $session_recording_canvas_recording: { label: 'Session recording canvas recording', - description: Session recording canvas capture config., + description: 'Session recording canvas capture config.', examples: ['{"enabled": false}'], }, $session_recording_network_payload_capture: { label: 'Session recording network payload capture', - description: Session recording network payload capture config., + description: 'Session recording network payload capture config.', examples: ['{"recordHeaders": false}'], }, + $configured_session_timeout_ms: { + label: 'Configured session timeout', + description: 'Configured session timeout in milliseconds.', + examples: ['1800000'], + }, + $replay_script_config: { + label: 'Replay script config', + description: 'Sets an alternative recorder script for the web sdk.', + examples: ['{"script": "recorder-next""}'], + }, $session_recording_url_trigger_activated_session: { label: 'Session recording URL trigger activated session', - description: ( - - Session recording URL trigger activated session config. Used by posthog-js to track URL activation - of session replay. - - ), + description: + 'Session recording URL trigger activated session config. Used by posthog-js to track URL activation of session replay.', }, $session_recording_url_trigger_status: { label: 'Session recording URL trigger status', - description: ( - - Session recording URL trigger status. Used by posthog-js to track URL activation of session replay. - - ), + description: + 'Session recording URL trigger status. 
Used by posthog-js to track URL activation of session replay.', }, $recording_status: { label: 'Session recording status', @@ -464,17 +460,17 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { }, $exception_capture_endpoint: { label: 'Exception capture endpoint', - description: Endpoint used by posthog-js exception autocapture., + description: 'Endpoint used by posthog-js exception autocapture.', examples: ['/e/'], }, $exception_capture_endpoint_suffix: { label: 'Exception capture endpoint', - description: Endpoint used by posthog-js exception autocapture., + description: 'Endpoint used by posthog-js exception autocapture.', examples: ['/e/'], }, $exception_capture_enabled_server_side: { label: 'Exception capture enabled server side', - description: Whether exception autocapture was enabled in remote config., + description: 'Whether exception autocapture was enabled in remote config.', }, // GeoIP @@ -938,6 +934,11 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { description: 'What the call to feature flag responded with.', examples: ['true', 'false'], }, + $feature_flag_payload: { + label: 'Feature Flag Response Payload', + description: 'The JSON payload that the call to feature flag responded with (if any)', + examples: ['{"variant": "test"}'], + }, $feature_flag: { label: 'Feature Flag', description: ( @@ -1193,7 +1194,7 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { }, $web_vitals_allowed_metrics: { label: 'Web vitals allowed metrics', - description: Allowed web vitals metrics config., + description: 'Allowed web vitals metrics config.', examples: ['["LCP", "CLS"]'], }, @@ -1319,72 +1320,72 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { }, $start_timestamp: { label: 'Start timestamp', - description: The timestamp of the first event from this session., + description: 'The timestamp of the first event from this session.', examples: [new Date().toISOString()], }, $end_timestamp: { label: 'End timestamp', - description: The timestamp of the last event from this session, + description: 'The timestamp of the last event from this session', examples: [new Date().toISOString()], }, $entry_current_url: { label: 'Entry URL', - description: The first URL visited in this session, + description: 'The first URL visited in this session.', examples: ['https://example.com/interesting-article?parameter=true'], }, $entry_pathname: { label: 'Entry pathname', - description: The first pathname visited in this session, + description: 'The first pathname visited in this session.', examples: ['/interesting-article?parameter=true'], }, $end_current_url: { label: 'Entry URL', - description: The first URL visited in this session, + description: 'The first URL visited in this session.', examples: ['https://example.com/interesting-article?parameter=true'], }, $end_pathname: { label: 'Entry pathname', - description: The first pathname visited in this session, + description: 'The first pathname visited in this session.', examples: ['/interesting-article?parameter=true'], }, $exit_current_url: { label: 'Exit URL', - description: The last URL visited in this session, + description: 'The last URL visited in this session.', examples: ['https://example.com/interesting-article?parameter=true'], }, $exit_pathname: { label: 'Exit pathname', - description: The last pathname visited in this session, + description: 'The last pathname visited in this session.', examples: ['https://example.com/interesting-article?parameter=true'], }, $pageview_count: { label: 'Pageview count', - description: The number of page 
view events in this session, + description: 'The number of page view events in this session.', examples: ['123'], }, $autocapture_count: { label: 'Autocapture count', - description: The number of autocapture events in this session, + description: 'The number of autocapture events in this session.', examples: ['123'], }, $screen_count: { label: 'Screen count', - description: The number of screen events in this session, + description: 'The number of screen events in this session.', examples: ['123'], }, $channel_type: { label: 'Channel type', - description: What type of acquisition channel this traffic came from., + description: 'What type of acquisition channel this traffic came from.', examples: ['Paid Search', 'Organic Video', 'Direct'], }, $is_bounce: { label: 'Is bounce', - description: Whether the session was a bounce., + description: 'Whether the session was a bounce.', examples: ['true', 'false'], }, $last_external_click_url: { label: 'Last external click URL', - description: The last external URL clicked in this session, + description: 'The last external URL clicked in this session.', examples: ['https://example.com/interesting-article?parameter=true'], }, $vitals_lcp: { diff --git a/frontend/src/lib/utils.tsx b/frontend/src/lib/utils.tsx index f512aa8e5dd85..15b90eeb5473a 100644 --- a/frontend/src/lib/utils.tsx +++ b/frontend/src/lib/utils.tsx @@ -124,6 +124,10 @@ export function percentage( maximumFractionDigits: number = 2, fixedPrecision: boolean = false ): string { + if (division === Infinity) { + return '∞%' + } + return division.toLocaleString('en-US', { style: 'percent', maximumFractionDigits, diff --git a/frontend/src/lib/utils/event-property-utls.tsx b/frontend/src/lib/utils/event-property-utls.tsx new file mode 100644 index 0000000000000..bd9ffbacb63ae --- /dev/null +++ b/frontend/src/lib/utils/event-property-utls.tsx @@ -0,0 +1,78 @@ +import { Tooltip } from 'lib/lemon-ui/Tooltip' + +import { ElementType } from '~/types' + +interface AutocapturedImage { + src: string | undefined + width: string | undefined + height: string | undefined +} + +export function autocaptureToImage(elements: ElementType[]): null | AutocapturedImage { + const find = elements.find((el) => el.tag_name === 'img') + const image = { + src: find?.attributes?.attr__src, + width: find?.attributes?.attr__width, + height: find?.attributes?.attr__height, + } + return image.src ? image : null +} + +export function AutocaptureImage({ img }: { img: AutocapturedImage }): JSX.Element | null { + if (img) { + return ( +
+ {/* Transparent grid background */} +
+ + Autocapture image src +
+ ) + } + + return null +} + +export function AutocaptureImageTab({ elements }: { elements: ElementType[] }): JSX.Element | null { + const img = autocaptureToImage(elements) + if (img) { + return ( +
+ +
+ ) + } + + return null +} + +export function AutocapturePreviewImage({ + elements, + imgPreviewHeight = '40', +}: { + elements: ElementType[] + imgPreviewHeight?: string +}): JSX.Element | null { + const img = autocaptureToImage(elements) + if (img) { + return ( + }> + Autocapture image src + + ) + } + + return null +} diff --git a/frontend/src/models/groupPropertiesModel.ts b/frontend/src/models/groupPropertiesModel.ts index 3a5839146e186..cd17e4f6b3488 100644 --- a/frontend/src/models/groupPropertiesModel.ts +++ b/frontend/src/models/groupPropertiesModel.ts @@ -2,7 +2,7 @@ import { connect, events, kea, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' import { groupsAccessLogic } from 'lib/introductions/groupsAccessLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { GroupTypeProperties, PersonProperty } from '~/types' @@ -11,7 +11,7 @@ import type { groupPropertiesModelType } from './groupPropertiesModelType' export const groupPropertiesModel = kea([ path(['models', 'groupPropertiesModel']), connect({ - values: [teamLogic, ['currentTeamId'], groupsAccessLogic, ['groupsEnabled']], + values: [projectLogic, ['currentProjectId'], groupsAccessLogic, ['groupsEnabled']], }), loaders(({ values }) => ({ allGroupProperties: [ @@ -19,7 +19,7 @@ export const groupPropertiesModel = kea([ { loadAllGroupProperties: async () => { if (values.groupsEnabled) { - return await api.get(`api/projects/${values.currentTeamId}/groups/property_definitions`) + return await api.get(`api/projects/${values.currentProjectId}/groups/property_definitions`) } return {} }, diff --git a/frontend/src/models/groupsModel.ts b/frontend/src/models/groupsModel.ts index 6b52106d88cfb..52270f642f6ce 100644 --- a/frontend/src/models/groupsModel.ts +++ b/frontend/src/models/groupsModel.ts @@ -4,7 +4,7 @@ import { subscriptions } from 'kea-subscriptions' import api from 'lib/api' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { groupsAccessLogic, GroupsAccessStatus } from 'lib/introductions/groupsAccessLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { GroupType, GroupTypeIndex } from '~/types' @@ -18,19 +18,19 @@ export interface Noun { export const groupsModel = kea([ path(['models', 'groupsModel']), connect({ - values: [teamLogic, ['currentTeamId'], groupsAccessLogic, ['groupsEnabled', 'groupsAccessStatus']], + values: [projectLogic, ['currentProjectId'], groupsAccessLogic, ['groupsEnabled', 'groupsAccessStatus']], }), loaders(({ values }) => ({ groupTypesRaw: [ [] as Array, { loadAllGroupTypes: async () => { - return await api.get(`api/projects/${values.currentTeamId}/groups_types`) + return await api.get(`api/projects/${values.currentProjectId}/groups_types`) }, updateGroupTypesMetadata: async (payload: Array) => { if (values.groupsEnabled) { return await api.update( - `/api/projects/${teamLogic.values.currentTeamId}/groups_types/update_metadata`, + `/api/projects/${values.currentProjectId}/groups_types/update_metadata`, payload ) } diff --git a/frontend/src/models/notebooksModel.ts b/frontend/src/models/notebooksModel.ts index 60c66ac74318d..b4e7e76e55433 100644 --- a/frontend/src/models/notebooksModel.ts +++ b/frontend/src/models/notebooksModel.ts @@ -9,7 +9,7 @@ import type { notebookLogicType } from 'scenes/notebooks/Notebook/notebookLogicT import { defaultNotebookContent, EditorFocusPosition, JSONContent } from 
'scenes/notebooks/Notebook/utils' import { notebookPanelLogic } from 'scenes/notebooks/NotebookPanel/notebookPanelLogic' import { LOCAL_NOTEBOOK_TEMPLATES } from 'scenes/notebooks/NotebookTemplates/notebookTemplates' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { urls } from 'scenes/urls' import { InsightVizNode, Node } from '~/queries/schema' @@ -75,7 +75,7 @@ export const notebooksModel = kea([ createNotebookFromDashboard: (dashboard: DashboardType) => ({ dashboard }), }), connect({ - values: [teamLogic, ['currentTeamId']], + values: [projectLogic, ['currentProjectId']], }), reducers({ @@ -105,7 +105,7 @@ export const notebooksModel = kea([ deleteNotebook: async ({ shortId, title }) => { await deleteWithUndo({ - endpoint: `projects/${values.currentTeamId}/notebooks`, + endpoint: `projects/${values.currentProjectId}/notebooks`, object: { name: title || shortId, id: shortId }, }) diff --git a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts index 25d0f75848491..b3cada69a6a89 100644 --- a/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts +++ b/frontend/src/queries/nodes/DataNode/dataNodeLogic.ts @@ -28,7 +28,13 @@ import { userLogic } from 'scenes/userLogic' import { dataNodeCollectionLogic, DataNodeCollectionProps } from '~/queries/nodes/DataNode/dataNodeCollectionLogic' import { removeExpressionComment } from '~/queries/nodes/DataTable/utils' import { performQuery } from '~/queries/query' -import { DashboardFilter, HogQLVariable, QueryStatus } from '~/queries/schema' +import { + DashboardFilter, + ErrorTrackingQuery, + ErrorTrackingQueryResponse, + HogQLVariable, + QueryStatus, +} from '~/queries/schema' import { ActorsQuery, ActorsQueryResponse, @@ -42,7 +48,14 @@ import { PersonsNode, QueryTiming, } from '~/queries/schema' -import { isActorsQuery, isEventsQuery, isInsightActorsQuery, isInsightQueryNode, isPersonsNode } from '~/queries/utils' +import { + isActorsQuery, + isErrorTrackingQuery, + isEventsQuery, + isInsightActorsQuery, + isInsightQueryNode, + isPersonsNode, +} from '~/queries/utils' import type { dataNodeLogicType } from './dataNodeLogicType' @@ -198,7 +211,6 @@ export const dataNodeLogic = kea([ const methodOptions: ApiMethodOptions = { signal: cache.abortController.signal, } - try { const response = await concurrencyController.run({ debugTag: query.kind, @@ -505,11 +517,15 @@ export const dataNodeLogic = kea([ return null } - if ((isEventsQuery(query) || isActorsQuery(query)) && !responseError && !dataLoading) { - if ((response as EventsQueryResponse | ActorsQueryResponse)?.hasMore) { + if ( + (isEventsQuery(query) || isActorsQuery(query) || isErrorTrackingQuery(query)) && + !responseError && + !dataLoading + ) { + if ((response as EventsQueryResponse | ActorsQueryResponse | ErrorTrackingQueryResponse)?.hasMore) { const sortKey = query.orderBy?.[0] ?? 
'timestamp DESC' - const typedResults = (response as EventsQueryResponse | ActorsQueryResponse)?.results if (isEventsQuery(query) && sortKey === 'timestamp DESC') { + const typedResults = (response as EventsQueryResponse)?.results const sortColumnIndex = query.select .map((hql) => removeExpressionComment(hql)) .indexOf('timestamp') @@ -528,11 +544,14 @@ export const dataNodeLogic = kea([ } } } else { + const typedResults = ( + response as EventsQueryResponse | ActorsQueryResponse | ErrorTrackingQueryResponse + )?.results return { ...query, offset: typedResults?.length || 0, limit: Math.max(100, Math.min(2 * (typedResults?.length || 100), LOAD_MORE_ROWS_LIMIT)), - } as EventsQuery | ActorsQuery + } as EventsQuery | ActorsQuery | ErrorTrackingQuery } } } diff --git a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx index 32e46f782062d..5a2830a135ef2 100644 --- a/frontend/src/queries/nodes/DataTable/EventRowActions.tsx +++ b/frontend/src/queries/nodes/DataTable/EventRowActions.tsx @@ -1,13 +1,11 @@ -import { useActions } from 'kea' -import { dayjs } from 'lib/dayjs' -import { IconLink, IconPlayCircle } from 'lib/lemon-ui/icons' +import ViewRecordingButton, { mightHaveRecording } from 'lib/components/ViewRecordingButton' +import { IconLink } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { More } from 'lib/lemon-ui/LemonButton/More' import { copyToClipboard } from 'lib/utils/copyToClipboard' import { getCurrentTeamId } from 'lib/utils/getAppContext' import { createActionFromEvent } from 'scenes/activity/explore/createActionFromEvent' import { insightUrlForEvent } from 'scenes/insights/utils' -import { sessionPlayerModalLogic } from 'scenes/session-recordings/player/modal/sessionPlayerModalLogic' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' @@ -18,7 +16,6 @@ interface EventActionProps { } export function EventRowActions({ event }: EventActionProps): JSX.Element { - const { openSessionPlayer } = useActions(sessionPlayerModalLogic) const insightUrl = insightUrlForEvent(event) return ( @@ -56,25 +53,17 @@ export function EventRowActions({ event }: EventActionProps): JSX.Element { Copy link to event )} - {!!event.properties?.$session_id && ( - { - e.preventDefault() - if (event.properties.$session_id) { - openSessionPlayer( - { id: event.properties.$session_id }, - dayjs(event.timestamp).valueOf() - ) - } - }} - fullWidth - sideIcon={} - data-attr="events-table-usage" - > - View recording - - )} + {insightUrl && ( Try out in Insights diff --git a/frontend/src/queries/nodes/DataTable/queryFeatures.ts b/frontend/src/queries/nodes/DataTable/queryFeatures.ts index ae0351ace46b3..af5659d40223b 100644 --- a/frontend/src/queries/nodes/DataTable/queryFeatures.ts +++ b/frontend/src/queries/nodes/DataTable/queryFeatures.ts @@ -9,7 +9,6 @@ import { isWebGoalsQuery, isWebOverviewQuery, isWebStatsTableQuery, - isWebTopClicksQuery, } from '~/queries/utils' export enum QueryFeature { @@ -62,7 +61,6 @@ export function getQueryFeatures(query: Node): Set { if ( isWebOverviewQuery(query) || - isWebTopClicksQuery(query) || isWebExternalClicksQuery(query) || isWebStatsTableQuery(query) || isWebGoalsQuery(query) diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts index 937c027a0a104..6d2f92eead426 100644 --- 
a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts @@ -5,6 +5,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { getVariablesFromQuery, haveVariablesOrFiltersChanged } from 'scenes/insights/utils/queryUtils' import { DataVisualizationNode, HogQLVariable } from '~/queries/schema' +import { DashboardType } from '~/types' import { dataVisualizationLogic } from '../../dataVisualizationLogic' import { Variable, VariableType } from '../../types' @@ -15,6 +16,8 @@ export interface VariablesLogicProps { key: string /** Disable any changes to the query */ readOnly: boolean + /** Dashboard ID for the current dashboard if we're viewing one */ + dashboardId?: DashboardType['id'] } const convertValueToCorrectType = (value: string, type: VariableType): number | string | boolean => { @@ -37,7 +40,7 @@ export const variablesLogic = kea([ actions: [dataVisualizationLogic, ['setQuery', 'loadData'], variableDataLogic, ['getVariables']], values: [ dataVisualizationLogic, - ['query', 'insightLogicProps'], + ['query'], variableDataLogic, ['variables', 'variablesLoading'], featureFlagLogic, @@ -124,9 +127,9 @@ export const variablesLogic = kea([ }, ], showVariablesBar: [ - (state) => [state.insightLogicProps], - (insightLogicProps) => { - return !insightLogicProps.dashboardId + () => [(_, props) => props.dashboardId], + (dashboardId) => { + return !dashboardId }, ], }), diff --git a/frontend/src/queries/nodes/DataVisualization/Components/seriesBreakdownLogic.test.ts b/frontend/src/queries/nodes/DataVisualization/Components/seriesBreakdownLogic.test.ts index b57c0c8b89e87..37758458fd8e1 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/seriesBreakdownLogic.test.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/seriesBreakdownLogic.test.ts @@ -3,7 +3,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { DataVisualizationNode, NodeKind } from '~/queries/schema' import { initKeaTests } from '~/test/init' -import { ChartDisplayType } from '~/types' +import { ChartDisplayType, ItemMode } from '~/types' import { dataNodeLogic } from '../../DataNode/dataNodeLogic' import { dataVisualizationLogic, DataVisualizationLogicProps } from '../dataVisualizationLogic' @@ -63,10 +63,8 @@ const dummyDataVisualizationLogicProps: DataVisualizationLogicProps = { setQuery: (query) => { globalQuery = query }, - insightLogicProps: { - cachedInsight: null, - dashboardItemId: 'new-test-SQL', - }, + insightMode: ItemMode.View, + dataNodeCollectionId: 'new-test-SQL', } describe('seriesBreakdownLogic', () => { diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index 9a021d962b0f9..61c189df39ec8 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -13,6 +13,7 @@ import { DatabaseTableTreeWithItems } from 'scenes/data-warehouse/external/DataW import { InsightErrorState } from 'scenes/insights/EmptyStates' import { insightDataLogic } from 'scenes/insights/insightDataLogic' import { insightLogic } from 'scenes/insights/insightLogic' +import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' import { urls } from 'scenes/urls' @@ -26,7 +27,7 @@ import { 
NodeKind, } from '~/queries/schema' import { QueryContext } from '~/queries/types' -import { ChartDisplayType, ExporterFormat, InsightLogicProps } from '~/types' +import { ChartDisplayType, ExportContext, ExporterFormat, InsightLogicProps } from '~/types' import { dataNodeLogic, DataNodeLogicProps } from '../DataNode/dataNodeLogic' import { DateRange } from '../DataNode/DateRange' @@ -45,7 +46,7 @@ import { variablesLogic } from './Components/Variables/variablesLogic' import { dataVisualizationLogic, DataVisualizationLogicProps } from './dataVisualizationLogic' import { displayLogic } from './displayLogic' -interface DataTableVisualizationProps { +export interface DataTableVisualizationProps { uniqueKey?: string | number query: DataVisualizationNode setQuery: (query: DataVisualizationNode) => void @@ -54,6 +55,7 @@ interface DataTableVisualizationProps { the data node logic becomes read only implicitly */ cachedResults?: AnyResponseType readOnly?: boolean + exportContext?: ExportContext /** Dashboard variables to override the ones in the query */ variablesOverride?: Record | null } @@ -78,10 +80,15 @@ export function DataTableVisualization({ } const vizKey = insightVizDataNodeKey(insightProps) + const dataNodeCollectionId = insightVizDataCollectionId(insightProps, key) + const { insightMode } = useValues(insightSceneLogic) const dataVisualizationLogicProps: DataVisualizationLogicProps = { key: vizKey, query, - insightLogicProps: insightProps, + dashboardId: insightProps.dashboardId, + dataNodeCollectionId, + loadPriority: insightProps.loadPriority, + insightMode, setQuery, cachedResults, variablesOverride, @@ -92,17 +99,24 @@ export function DataTableVisualization({ key: vizKey, cachedResults, loadPriority: insightProps.loadPriority, - dataNodeCollectionId: insightVizDataCollectionId(insightProps, key), + dataNodeCollectionId, variablesOverride, } + const { insightProps: insightLogicProps } = useValues(insightLogic) + const { exportContext } = useValues(insightDataLogic(insightLogicProps)) + return ( @@ -123,8 +138,6 @@ export function DataTableVisualization({ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX.Element { const { readOnly } = props - const { insightProps } = useValues(insightLogic) - const { exportContext } = useValues(insightDataLogic(insightProps)) const { query, @@ -188,6 +201,7 @@ function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX <> - {exportContext && ( + {props.exportContext && ( diff --git a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts index e6002ced4ce32..e1053f61571aa 100644 --- a/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/dataVisualizationLogic.ts @@ -3,11 +3,9 @@ import { subscriptions } from 'kea-subscriptions' import { dayjs } from 'lib/dayjs' import { lightenDarkenColor, RGBToHex, uuid } from 'lib/utils' import mergeObject from 'lodash.merge' -import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' import { teamLogic } from 'scenes/teamLogic' import { themeLogic } from '~/layout/navigation-3000/themeLogic' -import { insightVizDataCollectionId } from '~/queries/nodes/InsightViz/InsightViz' import { AnyResponseType, ChartAxis, @@ -19,7 +17,7 @@ import { HogQLVariable, } from '~/queries/schema' import { QueryContext } from '~/queries/types' -import { ChartDisplayType, InsightLogicProps, ItemMode } from '~/types' 
+import { ChartDisplayType, DashboardType, ItemMode } from '~/types' import { dataNodeLogic } from '../DataNode/dataNodeLogic' import { getQueryFeatures, QueryFeature } from '../DataTable/queryFeatures' @@ -64,11 +62,14 @@ export interface AxisSeries { export interface DataVisualizationLogicProps { key: string query: DataVisualizationNode + insightMode: ItemMode + dataNodeCollectionId: string setQuery?: (node: DataVisualizationNode) => void - insightLogicProps: InsightLogicProps context?: QueryContext cachedResults?: AnyResponseType insightLoading?: boolean + dashboardId?: DashboardType['id'] + loadPriority?: number /** Dashboard variables to override the ones in the query */ variablesOverride?: Record | null } @@ -217,14 +218,12 @@ export const dataVisualizationLogic = kea([ values: [ teamLogic, ['currentTeamId'], - insightSceneLogic, - ['insightMode'], dataNodeLogic({ cachedResults: props.cachedResults, key: props.key, query: props.query.source, - dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key), - loadPriority: props.insightLogicProps.loadPriority, + dataNodeCollectionId: props.dataNodeCollectionId, + loadPriority: props.loadPriority, variablesOverride: props.variablesOverride, }), ['response', 'responseLoading', 'responseError', 'queryCancelled'], @@ -236,8 +235,8 @@ export const dataVisualizationLogic = kea([ cachedResults: props.cachedResults, key: props.key, query: props.query.source, - dataNodeCollectionId: insightVizDataCollectionId(props.insightLogicProps, props.key), - loadPriority: props.insightLogicProps.loadPriority, + dataNodeCollectionId: props.dataNodeCollectionId, + loadPriority: props.loadPriority, variablesOverride: props.variablesOverride, }), ['loadData'], @@ -559,31 +558,32 @@ export const dataVisualizationLogic = kea([ return columns.filter((n) => n.type.isNumerical) }, ], + dashboardId: [() => [(_, props) => props.dashboardId], (dashboardId) => dashboardId ?? 
null], showEditingUI: [ - (state, props) => [state.insightMode, props.insightLogicProps], - (insightMode, insightLogicProps) => { - if (insightLogicProps.dashboardId) { + (state, props) => [props.insightMode, state.dashboardId], + (insightMode, dashboardId) => { + if (dashboardId) { return false } return insightMode == ItemMode.Edit }, ], - insightLogicProps: [(_state, props) => [props.insightLogicProps], (insightLogicProps) => insightLogicProps], showResultControls: [ - (state, props) => [state.insightMode, props.insightLogicProps], - (insightMode, insightLogicProps) => { + (state, props) => [props.insightMode, state.dashboardId], + (insightMode, dashboardId) => { if (insightMode === ItemMode.Edit) { return true } - return !insightLogicProps.dashboardId + return !dashboardId }, ], presetChartHeight: [ - (_state, props) => [props.insightLogicProps], - (insightLogicProps) => { - return !insightLogicProps.dashboardId + (state, props) => [props.key, state.dashboardId], + (key, dashboardId) => { + // Key for SQL editor based visualizations + return !key.includes('SQLEditorScene') && !dashboardId }, ], sourceFeatures: [(_, props) => [props.query], (query): Set => getQueryFeatures(query.source)], diff --git a/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx b/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx index 37a8c5fce49c9..49ebdc16ae396 100644 --- a/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx +++ b/frontend/src/queries/nodes/HogQLQuery/HogQLQueryEditor.tsx @@ -30,6 +30,7 @@ export interface HogQLQueryEditorProps { onChange?: (query: string) => void embedded?: boolean editorFooter?: (hasErrors: boolean, errors: string | null, isValidView: boolean) => JSX.Element + queryResponse?: Record } let uniqueNode = 0 @@ -60,6 +61,7 @@ export function HogQLQueryEditor(props: HogQLQueryEditorProps): JSX.Element { key, editor, monaco, + queryResponse: props.queryResponse, } const logic = hogQLQueryEditorLogic(hogQLQueryEditorLogicProps) const { queryInput, prompt, aiAvailable, promptError, promptLoading, multitab } = useValues(logic) diff --git a/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.tsx b/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.tsx index ac798bd39056e..0efaa396e9c94 100--- a/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.tsx +++ b/frontend/src/queries/nodes/HogQLQuery/hogQLQueryEditorLogic.tsx @@ -31,6 +31,7 @@ export interface HogQLQueryEditorLogicProps { monaco?: Monaco | null editor?: editor.IStandaloneCodeEditor | null metadataSource?: DataNode + queryResponse?: Record } export const hogQLQueryEditorLogic = kea([ @@ -139,10 +140,13 @@ export const hogQLQueryEditorLogic = kea([ kind: NodeKind.HogQLQuery, query: values.queryInput, } - await dataWarehouseViewsLogic.asyncActions.createDataWarehouseSavedQuery({ name, query }) + const types = props.queryResponse?.types ?? [] + + await dataWarehouseViewsLogic.asyncActions.createDataWarehouseSavedQuery({ name, query, types }) }, onUpdateView: async () => { - actions.updateView(values.queryInput) + const types = props.queryResponse?.types ??
[] + actions.updateView(values.queryInput, types) }, })), ]) diff --git a/frontend/src/queries/nodes/HogQLX/render.tsx b/frontend/src/queries/nodes/HogQLX/render.tsx index 74517d630886a..fbac43f53ce01 100644 --- a/frontend/src/queries/nodes/HogQLX/render.tsx +++ b/frontend/src/queries/nodes/HogQLX/render.tsx @@ -1,10 +1,7 @@ -import { LemonButton, Link } from '@posthog/lemon-ui' -import { useActions } from 'kea' +import { Link } from '@posthog/lemon-ui' import { JSONViewer } from 'lib/components/JSONViewer' import { Sparkline } from 'lib/components/Sparkline' -import { IconPlayCircle } from 'lib/lemon-ui/icons' -import { sessionPlayerModalLogic } from 'scenes/session-recordings/player/modal/sessionPlayerModalLogic' -import { urls } from 'scenes/urls' +import ViewRecordingButton from 'lib/components/ViewRecordingButton' import { ErrorBoundary } from '~/layout/ErrorBoundary' @@ -24,30 +21,6 @@ export function parseHogQLX(value: any): any { return value.map((v) => parseHogQLX(v)) } -function ViewRecordingModalButton({ sessionId }: { sessionId: string }): JSX.Element { - const { openSessionPlayer } = useActions(sessionPlayerModalLogic) - return ( - - } - data-attr="hog-ql-view-recording-button" - to={urls.replaySingle(sessionId)} - onClick={(e) => { - e.preventDefault() - if (sessionId) { - openSessionPlayer({ id: sessionId }) - } - }} - className="inline-block" - > - View recording - - - ) -} - export function renderHogQLX(value: any): JSX.Element { const object = parseHogQLX(value) @@ -68,7 +41,18 @@ export function renderHogQLX(value: any): JSX.Element { ) } else if (tag === 'RecordingButton') { const { sessionId, ...props } = rest - return + return ( + + + + ) } else if (tag === 'a') { const { href, source, target } = rest return ( diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index 640b17a507df8..4cc9cdcdb99f1 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -352,9 +352,6 @@ { "$ref": "#/definitions/WebExternalClicksTableQuery" }, - { - "$ref": "#/definitions/WebTopClicksQuery" - }, { "$ref": "#/definitions/WebGoalsQuery" }, @@ -369,6 +366,9 @@ }, { "$ref": "#/definitions/ExperimentTrendsQuery" + }, + { + "$ref": "#/definitions/RecordingsQuery" } ] }, @@ -483,6 +483,9 @@ }, { "$ref": "#/definitions/EventsQueryResponse" + }, + { + "$ref": "#/definitions/ErrorTrackingQueryResponse" } ] }, @@ -3620,83 +3623,6 @@ ], "type": "object" }, - "CachedWebTopClicksQueryResponse": { - "additionalProperties": false, - "properties": { - "cache_key": { - "type": "string" - }, - "cache_target_age": { - "format": "date-time", - "type": "string" - }, - "calculation_trigger": { - "description": "What triggered the calculation of the query, leave empty if user/immediate", - "type": "string" - }, - "columns": { - "items": {}, - "type": "array" - }, - "error": { - "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", - "type": "string" - }, - "hogql": { - "description": "Generated HogQL query.", - "type": "string" - }, - "is_cached": { - "type": "boolean" - }, - "last_refresh": { - "format": "date-time", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "next_allowed_client_refresh": { - "format": "date-time", - "type": "string" - }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." - }, - "results": { - "items": {}, - "type": "array" - }, - "samplingRate": { - "$ref": "#/definitions/SamplingRate" - }, - "timezone": { - "type": "string" - }, - "timings": { - "description": "Measured timings for different parts of the query generation process", - "items": { - "$ref": "#/definitions/QueryTiming" - }, - "type": "array" - }, - "types": { - "items": {}, - "type": "array" - } - }, - "required": [ - "cache_key", - "is_cached", - "last_refresh", - "next_allowed_client_refresh", - "results", - "timezone" - ], - "type": "object" - }, "ChartAxis": { "additionalProperties": false, "properties": { @@ -4397,51 +4323,6 @@ "required": ["results"], "type": "object" }, - { - "additionalProperties": false, - "properties": { - "columns": { - "items": {}, - "type": "array" - }, - "error": { - "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - "type": "string" - }, - "hogql": { - "description": "Generated HogQL query.", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." - }, - "results": { - "items": {}, - "type": "array" - }, - "samplingRate": { - "$ref": "#/definitions/SamplingRate" - }, - "timings": { - "description": "Measured timings for different parts of the query generation process", - "items": { - "$ref": "#/definitions/QueryTiming" - }, - "type": "array" - }, - "types": { - "items": {}, - "type": "array" - } - }, - "required": ["results"], - "type": "object" - }, { "additionalProperties": false, "properties": { @@ -4825,9 +4706,6 @@ { "$ref": "#/definitions/WebExternalClicksTableQuery" }, - { - "$ref": "#/definitions/WebTopClicksQuery" - }, { "$ref": "#/definitions/WebGoalsQuery" }, @@ -5586,7 +5464,10 @@ "$ref": "#/definitions/HogQLQueryModifiers", "description": "Modifiers used when performing the query" }, - "order": { + "offset": { + "type": "integer" + }, + "orderBy": { "enum": ["last_seen", "first_seen", "occurrences", "users", "sessions"], "type": "string" }, @@ -8622,7 +8503,6 @@ "InsightActorsQueryOptions", "FunnelCorrelationQuery", "WebOverviewQuery", - "WebTopClicksQuery", "WebStatsTableQuery", "WebExternalClicksTableQuery", "WebGoalsQuery", @@ -9713,51 +9593,6 @@ "required": ["results"], "type": "object" }, - { - "additionalProperties": false, - "properties": { - "columns": { - "items": {}, - "type": "array" - }, - "error": { - "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", - "type": "string" - }, - "hogql": { - "description": "Generated HogQL query.", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." - }, - "results": { - "items": {}, - "type": "array" - }, - "samplingRate": { - "$ref": "#/definitions/SamplingRate" - }, - "timings": { - "description": "Measured timings for different parts of the query generation process", - "items": { - "$ref": "#/definitions/QueryTiming" - }, - "type": "array" - }, - "types": { - "items": {}, - "type": "array" - } - }, - "required": ["results"], - "type": "object" - }, { "additionalProperties": false, "properties": { @@ -10386,51 +10221,6 @@ "required": ["results"], "type": "object" }, - { - "additionalProperties": false, - "properties": { - "columns": { - "items": {}, - "type": "array" - }, - "error": { - "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - "type": "string" - }, - "hogql": { - "description": "Generated HogQL query.", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." - }, - "results": { - "items": {}, - "type": "array" - }, - "samplingRate": { - "$ref": "#/definitions/SamplingRate" - }, - "timings": { - "description": "Measured timings for different parts of the query generation process", - "items": { - "$ref": "#/definitions/QueryTiming" - }, - "type": "array" - }, - "types": { - "items": {}, - "type": "array" - } - }, - "required": ["results"], - "type": "object" - }, { "additionalProperties": false, "properties": { @@ -11175,9 +10965,6 @@ { "$ref": "#/definitions/WebExternalClicksTableQuery" }, - { - "$ref": "#/definitions/WebTopClicksQuery" - }, { "$ref": "#/definitions/WebGoalsQuery" }, @@ -11434,6 +11221,7 @@ "type": "array" }, "date_from": { + "default": "-3d", "type": ["string", "null"] }, "date_to": { @@ -11469,10 +11257,12 @@ "type": "integer" }, "operand": { - "$ref": "#/definitions/FilterLogicalOperator" + "$ref": "#/definitions/FilterLogicalOperator", + "default": "AND" }, "order": { - "$ref": "#/definitions/RecordingOrder" + "$ref": "#/definitions/RecordingOrder", + "default": "start_time" }, "person_uuid": { "type": "string" @@ -13568,104 +13358,6 @@ "required": ["results"], "type": "object" }, - "WebTopClicksQuery": { - "additionalProperties": false, - "properties": { - "conversionGoal": { - "anyOf": [ - { - "$ref": "#/definitions/WebAnalyticsConversionGoal" - }, - { - "type": "null" - } - ] - }, - "dateRange": { - "$ref": "#/definitions/DateRange" - }, - "filterTestAccounts": { - "type": "boolean" - }, - "kind": { - "const": "WebTopClicksQuery", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "properties": { - "$ref": "#/definitions/WebAnalyticsPropertyFilters" - }, - "response": { - "$ref": "#/definitions/WebTopClicksQueryResponse" - }, - "sampling": { - "additionalProperties": false, - "properties": { - "enabled": { - "type": "boolean" - }, - "forceSamplingRate": { - 
"$ref": "#/definitions/SamplingRate" - } - }, - "type": "object" - }, - "useSessionsTable": { - "deprecated": "ignored, always treated as enabled *", - "type": "boolean" - } - }, - "required": ["kind", "properties"], - "type": "object" - }, - "WebTopClicksQueryResponse": { - "additionalProperties": false, - "properties": { - "columns": { - "items": {}, - "type": "array" - }, - "error": { - "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - "type": "string" - }, - "hogql": { - "description": "Generated HogQL query.", - "type": "string" - }, - "modifiers": { - "$ref": "#/definitions/HogQLQueryModifiers", - "description": "Modifiers used when performing the query" - }, - "query_status": { - "$ref": "#/definitions/QueryStatus", - "description": "Query status indicates whether next to the provided data, a query is still running." - }, - "results": { - "items": {}, - "type": "array" - }, - "samplingRate": { - "$ref": "#/definitions/SamplingRate" - }, - "timings": { - "description": "Measured timings for different parts of the query generation process", - "items": { - "$ref": "#/definitions/QueryTiming" - }, - "type": "array" - }, - "types": { - "items": {}, - "type": "array" - } - }, - "required": ["results"], - "type": "object" - }, "YAxisSettings": { "additionalProperties": false, "properties": { diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index b033a9150a2a7..790bfd5b681c3 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -92,7 +92,6 @@ export enum NodeKind { // Web analytics queries WebOverviewQuery = 'WebOverviewQuery', - WebTopClicksQuery = 'WebTopClicksQuery', WebStatsTableQuery = 'WebStatsTableQuery', WebExternalClicksTableQuery = 'WebExternalClicksTableQuery', WebGoalsQuery = 'WebGoalsQuery', @@ -127,12 +126,12 @@ export type AnyDataNode = | WebOverviewQuery | WebStatsTableQuery | WebExternalClicksTableQuery - | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery | ExperimentFunnelsQuery | ExperimentTrendsQuery + | RecordingsQuery /** * @discriminator kind @@ -155,7 +154,6 @@ export type QuerySchema = | WebOverviewQuery | WebStatsTableQuery | WebExternalClicksTableQuery - | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery @@ -214,6 +212,7 @@ export type AnyResponseType = | HogQLAutocompleteResponse | EventsNode['response'] | EventsQueryResponse + | ErrorTrackingQueryResponse /** @internal - no need to emit to schema.json. 
*/ export interface DataNode = Record> extends Node { @@ -327,6 +326,9 @@ export type RecordingOrder = export interface RecordingsQuery extends DataNode { kind: NodeKind.RecordingsQuery + /** + * @default "-3d" + * */ date_from?: string | null date_to?: string | null events?: FilterType['events'] @@ -335,9 +337,15 @@ export interface RecordingsQuery extends DataNode { console_log_filters?: LogEntryPropertyFilter[] having_predicates?: AnyPropertyFilter[] // duration and snapshot_source filters filter_test_accounts?: boolean + /** + * @default "AND" + * */ operand?: FilterLogicalOperator session_ids?: string[] person_uuid?: string + /** + * @default "start_time" + * */ order?: RecordingOrder limit?: integer offset?: integer @@ -617,7 +625,6 @@ export interface DataTableNode | WebOverviewQuery | WebStatsTableQuery | WebExternalClicksTableQuery - | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery @@ -638,7 +645,6 @@ export interface DataTableNode | WebOverviewQuery | WebStatsTableQuery | WebExternalClicksTableQuery - | WebTopClicksQuery | WebGoalsQuery | SessionAttributionExplorerQuery | ErrorTrackingQuery @@ -1819,17 +1825,6 @@ export interface WebOverviewQueryResponse extends AnalyticsQueryResponseBase -export interface WebTopClicksQuery extends WebAnalyticsQueryBase { - kind: NodeKind.WebTopClicksQuery -} -export interface WebTopClicksQueryResponse extends AnalyticsQueryResponseBase { - types?: unknown[] - columns?: unknown[] - samplingRate?: SamplingRate -} - -export type CachedWebTopClicksQueryResponse = CachedQueryResponse - export enum WebStatsBreakdown { Page = 'Page', InitialPage = 'InitialPage', @@ -1936,13 +1931,14 @@ export interface ErrorTrackingQuery extends DataNode kind: NodeKind.ErrorTrackingQuery issueId?: string select?: HogQLExpression[] - order?: 'last_seen' | 'first_seen' | 'occurrences' | 'users' | 'sessions' + orderBy?: 'last_seen' | 'first_seen' | 'occurrences' | 'users' | 'sessions' dateRange: DateRange assignee?: integer | null filterGroup?: PropertyGroupFilter filterTestAccounts?: boolean searchQuery?: string limit?: integer + offset?: integer } export interface ErrorTrackingIssue { diff --git a/frontend/src/queries/types.ts b/frontend/src/queries/types.ts index afbae27286816..849ebf6c33e36 100644 --- a/frontend/src/queries/types.ts +++ b/frontend/src/queries/types.ts @@ -49,7 +49,7 @@ export type QueryContextColumnComponent = ComponentType<{ }> interface QueryContextColumn { - title?: string + title?: JSX.Element | string renderTitle?: QueryContextColumnTitleComponent render?: QueryContextColumnComponent align?: 'left' | 'right' | 'center' // default is left diff --git a/frontend/src/queries/utils.ts b/frontend/src/queries/utils.ts index f2828675a643d..dc98b2ec2d3d1 100644 --- a/frontend/src/queries/utils.ts +++ b/frontend/src/queries/utils.ts @@ -13,6 +13,7 @@ import { DataVisualizationNode, DataWarehouseNode, DateRange, + ErrorTrackingQuery, EventsNode, EventsQuery, FunnelsQuery, @@ -40,7 +41,6 @@ import { WebGoalsQuery, WebOverviewQuery, WebStatsTableQuery, - WebTopClicksQuery, } from '~/queries/schema' import { ChartDisplayType, IntervalType } from '~/types' @@ -133,10 +133,6 @@ export function isWebExternalClicksQuery(node?: Record | null): boo return node?.kind === NodeKind.WebExternalClicksTableQuery } -export function isWebTopClicksQuery(node?: Record | null): node is WebTopClicksQuery { - return node?.kind === NodeKind.WebTopClicksQuery -} - export function isWebGoalsQuery(node?: Record | null): node is WebGoalsQuery 
{ return node?.kind === NodeKind.WebGoalsQuery } @@ -147,6 +143,10 @@ export function isSessionAttributionExplorerQuery( return node?.kind === NodeKind.SessionAttributionExplorerQuery } +export function isErrorTrackingQuery(node?: Record | null): node is ErrorTrackingQuery { + return node?.kind === NodeKind.ErrorTrackingQuery +} + export function containsHogQLQuery(node?: Record | null): boolean { if (!node) { return false diff --git a/frontend/src/scenes/actions/actionLogic.ts b/frontend/src/scenes/actions/actionLogic.ts index 15d95e1475da2..e3a7791b6cdc1 100644 --- a/frontend/src/scenes/actions/actionLogic.ts +++ b/frontend/src/scenes/actions/actionLogic.ts @@ -40,7 +40,7 @@ export const actionLogic = kea([ null as HogFunctionType[] | null, { loadMatchingHogFunctions: async () => { - const res = await api.hogFunctions.list({ filters: { actions: [{ id: `${props.id}` }] } }) + const res = await api.hogFunctions.list({ actions: [{ id: `${props.id}` }] }) return res.results }, diff --git a/frontend/src/scenes/activity/explore/EventDetails.tsx b/frontend/src/scenes/activity/explore/EventDetails.tsx index 1b7a1733cf48f..60f59f338f618 100644 --- a/frontend/src/scenes/activity/explore/EventDetails.tsx +++ b/frontend/src/scenes/activity/explore/EventDetails.tsx @@ -1,6 +1,5 @@ import './EventDetails.scss' -import { Properties } from '@posthog/plugin-scaffold' import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay' import { HTMLElementsDisplay } from 'lib/components/HTMLElementsDisplay/HTMLElementsDisplay' import { JSONViewer } from 'lib/components/JSONViewer' @@ -11,6 +10,7 @@ import { LemonTableProps } from 'lib/lemon-ui/LemonTable' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { CORE_FILTER_DEFINITIONS_BY_GROUP, KNOWN_PROMOTED_PROPERTY_PARENTS } from 'lib/taxonomy' import { pluralize } from 'lib/utils' +import { AutocaptureImageTab, autocaptureToImage } from 'lib/utils/event-property-utls' import { useState } from 'react' import { EventType, PropertyDefinitionType } from '~/types' @@ -24,9 +24,9 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem const [showSystemProps, setShowSystemProps] = useState(false) const [activeTab, setActiveTab] = useState(event.event === '$exception' ? 
'exception' : 'properties') - const displayedEventProperties: Properties = {} - const visibleSystemProperties: Properties = {} - const featureFlagProperties: Properties = {} + const displayedEventProperties = {} + const visibleSystemProperties = {} + const featureFlagProperties = {} let systemPropsCount = 0 for (const key of Object.keys(event.properties)) { if (CORE_FILTER_DEFINITIONS_BY_GROUP.events[key] && CORE_FILTER_DEFINITIONS_BY_GROUP.events[key].system) { @@ -111,6 +111,14 @@ export function EventDetails({ event, tableProps }: EventDetailsProps): JSX.Elem }) } + if (event.elements && autocaptureToImage(event.elements)) { + tabs.push({ + key: 'image', + label: 'Image', + content: , + }) + } + if (event.event === '$exception') { tabs.push({ key: 'exception', diff --git a/frontend/src/scenes/billing/ExportsUnsubscribeTable/exportsUnsubscribeTableLogic.tsx b/frontend/src/scenes/billing/ExportsUnsubscribeTable/exportsUnsubscribeTableLogic.tsx index 1763f5ee50ffe..48527d28af765 100644 --- a/frontend/src/scenes/billing/ExportsUnsubscribeTable/exportsUnsubscribeTableLogic.tsx +++ b/frontend/src/scenes/billing/ExportsUnsubscribeTable/exportsUnsubscribeTableLogic.tsx @@ -3,6 +3,7 @@ import { actions, afterMount, connect, kea, listeners, path, selectors } from 'k import { loaders } from 'kea-loaders' import api from 'lib/api' import { getCurrentTeamId } from 'lib/utils/getAppContext' +import { DESTINATION_TYPES } from 'scenes/pipeline/destinations/constants' import { pipelineDestinationsLogic } from 'scenes/pipeline/destinations/destinationsLogic' import { HogFunctionIcon } from 'scenes/pipeline/hogfunctions/HogFunctionIcon' import { pipelineAccessLogic } from 'scenes/pipeline/pipelineAccessLogic' @@ -29,17 +30,17 @@ export interface ItemToDisable { export const exportsUnsubscribeTableLogic = kea([ path(['scenes', 'pipeline', 'ExportsUnsubscribeTableLogic']), - connect({ + connect(() => ({ values: [ pipelineAccessLogic, ['canConfigurePlugins'], userLogic, ['user'], - pipelineDestinationsLogic, + pipelineDestinationsLogic({ types: DESTINATION_TYPES }), ['paidHogFunctions'], ], - actions: [pipelineDestinationsLogic, ['toggleNodeHogFunction']], - }), + actions: [pipelineDestinationsLogic({ types: DESTINATION_TYPES }), ['toggleNodeHogFunction']], + })), actions({ disablePlugin: (id: number) => ({ id }), diff --git a/frontend/src/scenes/data-management/database/DatabaseTable.tsx b/frontend/src/scenes/data-management/database/DatabaseTable.tsx index c2dd16f4b6ab0..ec4f34a783f83 100644 --- a/frontend/src/scenes/data-management/database/DatabaseTable.tsx +++ b/frontend/src/scenes/data-management/database/DatabaseTable.tsx @@ -9,7 +9,7 @@ import { useCallback } from 'react' import { dataWarehouseJoinsLogic } from 'scenes/data-warehouse/external/dataWarehouseJoinsLogic' import { dataWarehouseSceneLogic } from 'scenes/data-warehouse/settings/dataWarehouseSceneLogic' import { viewLinkLogic } from 'scenes/data-warehouse/viewLinkLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { urls } from 'scenes/urls' import { DatabaseSchemaTable, DatabaseSerializedFieldType } from '~/queries/schema' @@ -46,7 +46,7 @@ const isNonEditableSchemaType = (schemaType: unknown): schemaType is NonEditable return typeof schemaType === 'string' && nonEditableSchemaTypes.includes(schemaType as NonEditableSchemaTypes) } const JoinsMoreMenu = ({ tableName, fieldName }: { tableName: string; fieldName: string }): JSX.Element => { - const { currentTeamId } = 
useValues(teamLogic) + const { currentProjectId } = useValues(projectLogic) const { toggleEditJoinModal } = useActions(viewLinkLogic) const { joins, joinsLoading } = useValues(dataWarehouseJoinsLogic) const { loadJoins } = useActions(dataWarehouseJoinsLogic) @@ -68,7 +68,7 @@ const JoinsMoreMenu = ({ tableName, fieldName }: { tableName: string; fieldName: fullWidth onClick={() => { void deleteWithUndo({ - endpoint: `projects/${currentTeamId}/warehouse_view_link`, + endpoint: `projects/${currentProjectId}/warehouse_view_link`, object: { id: join.id, name: `${join.field_name} on ${join.source_table_name}`, diff --git a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx index e2232a1a8dbcc..908c792da1fba 100644 --- a/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx +++ b/frontend/src/scenes/data-management/ingestion-warnings/IngestionWarningsView.tsx @@ -3,8 +3,7 @@ import { ReadingHog } from 'lib/components/hedgehogs' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' import { Sparkline } from 'lib/components/Sparkline' import { TZLabel } from 'lib/components/TZLabel' -import { IconPlayCircle } from 'lib/lemon-ui/icons' -import { LemonButton } from 'lib/lemon-ui/LemonButton' +import ViewRecordingButton from 'lib/components/ViewRecordingButton' import { LemonTable } from 'lib/lemon-ui/LemonTable' import { Link } from 'lib/lemon-ui/Link' import { urls } from 'scenes/urls' @@ -155,15 +154,13 @@ const WARNING_TYPE_RENDERER = {
  • session_id: {details.session_id}
  • - } data-attr="skewed-timestamp-view-recording" - > - View recording - + />
    ) @@ -188,15 +185,13 @@ const WARNING_TYPE_RENDERER = {
  • skew: {details.daysFromNow} days
  • - } data-attr="skewed-timestamp-view-recording" - > - View recording - + />
    ) @@ -216,15 +211,13 @@ const WARNING_TYPE_RENDERER = {
  • session_id: {details.session_id}
  • - } data-attr="message-too-large-view-recording" - > - View recording - + />
    ) diff --git a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts index 8c1ce8f75f838..0e9982290b3c6 100644 --- a/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts +++ b/frontend/src/scenes/data-management/ingestion-warnings/ingestionWarningsLogic.ts @@ -3,6 +3,7 @@ import { loaders } from 'kea-loaders' import api from 'lib/api' import { dayjs, dayjsUtcToTimezone } from 'lib/dayjs' import { range } from 'lib/utils' +import { projectLogic } from 'scenes/projectLogic' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' @@ -31,7 +32,7 @@ export const ingestionWarningsLogic = kea([ path(['scenes', 'data-management', 'ingestion-warnings', 'ingestionWarningsLogic']), connect({ - values: [teamLogic, ['currentTeamId', 'timezone']], + values: [teamLogic, ['timezone'], projectLogic, ['currentProjectId']], }), loaders(({ values }) => ({ @@ -39,7 +40,7 @@ export const ingestionWarningsLogic = kea([ [] as IngestionWarningSummary[], { loadData: async () => { - const { results } = await api.get(`api/projects/${values.currentTeamId}/ingestion_warnings`) + const { results } = await api.get(`api/projects/${values.currentProjectId}/ingestion_warnings`) return results }, }, diff --git a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx index d6f9e9f083146..3004b8ee60daf 100644 --- a/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx +++ b/frontend/src/scenes/data-warehouse/ViewLinkModal.tsx @@ -3,6 +3,7 @@ import './ViewLinkModal.scss' import { IconCollapse, IconExpand } from '@posthog/icons' import { LemonButton, + LemonCheckbox, LemonDivider, LemonDropdown, LemonInput, @@ -58,6 +59,8 @@ export function ViewLinkForm(): JSX.Element { sourceIsUsingHogQLExpression, joiningIsUsingHogQLExpression, isViewLinkSubmitting, + experimentsOptimized, + experimentsTimestampKey, } = useValues(viewLinkLogic) const { selectJoiningTable, @@ -66,6 +69,8 @@ export function ViewLinkForm(): JSX.Element { setFieldName, selectSourceKey, selectJoiningKey, + setExperimentsOptimized, + selectExperimentsTimestampKey, } = useActions(viewLinkLogic) const [advancedSettingsExpanded, setAdvancedSettingsExpanded] = useState(false) @@ -151,6 +156,37 @@ export function ViewLinkForm(): JSX.Element {
    + {'events' === selectedJoiningTableName && ( +
    + +
    +
    + Optimize for Experiments + + setExperimentsOptimized(checked)} + fullWidth + label="Limit join to most recent matching event based on timestamp" + /> + +
    +
    + Source Timestamp Key + + + +
    +
    +
    + )} {sqlCodeSnippet && (
    diff --git a/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx new file mode 100644 index 0000000000000..988323bd02093 --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/OutputPane.tsx @@ -0,0 +1,397 @@ +import 'react-data-grid/lib/styles.css' + +import { IconGear } from '@posthog/icons' +import { LemonButton, LemonTabs, Spinner } from '@posthog/lemon-ui' +import clsx from 'clsx' +import { BindLogic, useActions, useValues } from 'kea' +import { router } from 'kea-router' +import { AnimationType } from 'lib/animations/animations' +import { Animation } from 'lib/components/Animation/Animation' +import { ExportButton } from 'lib/components/ExportButton/ExportButton' +import { useMemo } from 'react' +import DataGrid from 'react-data-grid' +import { InsightErrorState } from 'scenes/insights/EmptyStates' +import { insightDataLogic } from 'scenes/insights/insightDataLogic' +import { insightLogic } from 'scenes/insights/insightLogic' +import { HogQLBoldNumber } from 'scenes/insights/views/BoldNumber/BoldNumber' + +import { KeyboardShortcut } from '~/layout/navigation-3000/components/KeyboardShortcut' +import { themeLogic } from '~/layout/navigation-3000/themeLogic' +import { dataNodeLogic, DataNodeLogicProps } from '~/queries/nodes/DataNode/dataNodeLogic' +import { LineGraph } from '~/queries/nodes/DataVisualization/Components/Charts/LineGraph' +import { SideBar } from '~/queries/nodes/DataVisualization/Components/SideBar' +import { Table } from '~/queries/nodes/DataVisualization/Components/Table' +import { TableDisplay } from '~/queries/nodes/DataVisualization/Components/TableDisplay' +import { variableModalLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variableModalLogic' +import { VariablesForInsight } from '~/queries/nodes/DataVisualization/Components/Variables/Variables' +import { variablesLogic } from '~/queries/nodes/DataVisualization/Components/Variables/variablesLogic' +import { DataTableVisualizationProps } from '~/queries/nodes/DataVisualization/DataVisualization' +import { + dataVisualizationLogic, + DataVisualizationLogicProps, +} from '~/queries/nodes/DataVisualization/dataVisualizationLogic' +import { displayLogic } from '~/queries/nodes/DataVisualization/displayLogic' +import { DataVisualizationNode, HogQLQueryResponse, NodeKind } from '~/queries/schema' +import { ChartDisplayType, ExporterFormat, ItemMode } from '~/types' + +import { DATAWAREHOUSE_EDITOR_ITEM_ID } from '../external/dataWarehouseExternalSceneLogic' +import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic' +import { multitabEditorLogic } from './multitabEditorLogic' +import { outputPaneLogic, OutputTab } from './outputPaneLogic' + +interface OutputPaneProps { + onSave: () => void + saveDisabledReason?: string + onQueryInputChange: () => void + logicKey: string + query: string +} + +export function OutputPane({ + onQueryInputChange, + onSave, + saveDisabledReason, + logicKey, + query, +}: OutputPaneProps): JSX.Element { + const { activeTab } = useValues(outputPaneLogic) + const { setActiveTab } = useActions(outputPaneLogic) + + const codeEditorKey = `hogQLQueryEditor/${router.values.location.pathname}` + + const { editingView, queryInput } = useValues( + multitabEditorLogic({ + key: codeEditorKey, + }) + ) + const { isDarkModeOn } = useValues(themeLogic) + const { response, responseLoading } = useValues( + dataNodeLogic({ + key: logicKey, + query: { + kind: NodeKind.HogQLQuery, + query, + 
}, + doNotLoad: !query, + }) + ) + const { dataWarehouseSavedQueriesLoading } = useValues(dataWarehouseViewsLogic) + const { updateDataWarehouseSavedQuery } = useActions(dataWarehouseViewsLogic) + + const { insightProps } = useValues( + insightLogic({ + dashboardItemId: DATAWAREHOUSE_EDITOR_ITEM_ID, + cachedInsight: null, + doNotLoad: true, + }) + ) + const { setQuery } = useActions( + insightDataLogic({ + ...insightProps, + }) + ) + + const columns = useMemo(() => { + return ( + response?.columns?.map((column: string) => ({ + key: column, + name: column, + resizable: true, + })) ?? [] + ) + }, [response]) + + const rows = useMemo(() => { + if (!response?.results) { + return [] + } + return response?.results?.map((row: any[]) => { + const rowObject: Record = {} + response.columns.forEach((column: string, i: number) => { + rowObject[column] = row[i] + }) + return rowObject + }) + }, [response]) + + const Content = (): JSX.Element | null => { + if (activeTab === OutputTab.Results) { + return responseLoading ? ( + + ) : !response ? ( + Query results will appear here + ) : ( +
    + +
    + ) + } + + if (activeTab === OutputTab.Visualization) { + return !response ? ( +
    + Query results will be visualized here +
    + ) : ( +
    + +
    + ) + } + + return null + } + + return ( +
    +
    + setActiveTab(tab as OutputTab)} + tabs={[ + { + key: OutputTab.Results, + label: 'Results', + }, + { + key: OutputTab.Visualization, + label: 'Visualization', + }, + ]} + /> +
    + {editingView ? ( + <> + + updateDataWarehouseSavedQuery({ + id: editingView.id, + query: { + kind: NodeKind.HogQLQuery, + query: queryInput, + }, + types: response?.types ?? [], + }) + } + > + Update view + + + ) : ( + onSave()} disabledReason={saveDisabledReason}> + Save as view + + )} + onQueryInputChange()}> + Run + + +
    +
    +
    + +
    +
    + ) +} + +function DataTableVisualizationContent({ + query, + setQuery, + activeTab, +}: { + query: DataVisualizationNode + setQuery: (query: DataVisualizationNode) => void + activeTab: OutputTab +}): JSX.Element { + const vizKey = `SQLEditorScene.${activeTab}` + const dataVisualizationLogicProps: DataVisualizationLogicProps = { + key: vizKey, + query, + dashboardId: undefined, + dataNodeCollectionId: vizKey, + insightMode: ItemMode.Edit, + loadPriority: undefined, + setQuery, + cachedResults: undefined, + variablesOverride: undefined, + } + + const dataNodeLogicProps: DataNodeLogicProps = { + query: query.source, + key: vizKey, + cachedResults: undefined, + loadPriority: undefined, + dataNodeCollectionId: vizKey, + variablesOverride: undefined, + } + + return ( + + + + + + + + + + + + ) +} + +function InternalDataTableVisualization(props: DataTableVisualizationProps): JSX.Element { + const logic = insightLogic({ + dashboardItemId: DATAWAREHOUSE_EDITOR_ITEM_ID, + cachedInsight: null, + }) + const { saveAs } = useActions(logic) + + const { + query, + visualizationType, + showEditingUI, + showResultControls, + response, + responseLoading, + responseError, + queryCancelled, + isChartSettingsPanelOpen, + } = useValues(dataVisualizationLogic) + + const { toggleChartSettingsPanel } = useActions(dataVisualizationLogic) + + let component: JSX.Element | null = null + + // TODO(@Gilbert09): Better loading support for all components - e.g. using the `loading` param of `Table` + if (!showEditingUI && (!response || responseLoading)) { + component = ( +
    + +
    + ) + } else if (visualizationType === ChartDisplayType.ActionsTable) { + component = ( + + ) + } else if ( + visualizationType === ChartDisplayType.ActionsLineGraph || + visualizationType === ChartDisplayType.ActionsBar || + visualizationType === ChartDisplayType.ActionsAreaGraph || + visualizationType === ChartDisplayType.ActionsStackedBar + ) { + component = + } else if (visualizationType === ChartDisplayType.BoldNumber) { + component = + } + + return ( +
    +
    +
    + {isChartSettingsPanelOpen && ( +
    + +
    + )} +
    + {visualizationType !== ChartDisplayType.ActionsTable && responseError ? ( +
    + +
    + ) : ( + component + )} +
    +
    + {showResultControls && ( + <> +
    +
    +
    +
    + + + } + type={isChartSettingsPanelOpen ? 'primary' : 'secondary'} + onClick={() => toggleChartSettingsPanel()} + tooltip="Visualization settings" + /> + + {props.exportContext && ( + + )} + + saveAs(true, false)}> + Create insight + +
    +
    +
    + + )} + + +
    +
    + ) +} diff --git a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx index 9f599049415e2..fa8f52c604781 100644 --- a/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx +++ b/frontend/src/scenes/data-warehouse/editor/QueryWindow.tsx @@ -5,9 +5,9 @@ import type { editor as importedEditor } from 'monaco-editor' import { useState } from 'react' import { multitabEditorLogic } from './multitabEditorLogic' +import { OutputPane } from './OutputPane' import { QueryPane } from './QueryPane' import { QueryTabs } from './QueryTabs' -import { ResultPane } from './ResultPane' export function QueryWindow(): JSX.Element { const [monacoAndEditor, setMonacoAndEditor] = useState( @@ -68,7 +68,7 @@ export function QueryWindow(): JSX.Element { }, }} /> - void - saveDisabledReason?: string - onQueryInputChange: () => void - logicKey: string - query: string -} - -export function ResultPane({ - onQueryInputChange, - onSave, - saveDisabledReason, - logicKey, - query, -}: ResultPaneProps): JSX.Element { - const codeEditorKey = `hogQLQueryEditor/${router.values.location.pathname}` - - const { editingView, queryInput } = useValues( - multitabEditorLogic({ - key: codeEditorKey, - }) - ) - const { isDarkModeOn } = useValues(themeLogic) - const { response, responseLoading } = useValues( - dataNodeLogic({ - key: logicKey, - query: { - kind: NodeKind.HogQLQuery, - query, - }, - doNotLoad: !query, - }) - ) - const { dataWarehouseSavedQueriesLoading } = useValues(dataWarehouseViewsLogic) - const { updateDataWarehouseSavedQuery } = useActions(dataWarehouseViewsLogic) - - const columns = useMemo(() => { - return ( - response?.columns?.map((column: string) => ({ - key: column, - name: column, - resizable: true, - })) ?? [] - ) - }, [response]) - - const rows = useMemo(() => { - if (!response?.results) { - return [] - } - return response?.results?.map((row: any[]) => { - const rowObject: Record = {} - response.columns.forEach((column: string, i: number) => { - rowObject[column] = row[i] - }) - return rowObject - }) - }, [response]) - - return ( -
    -
    - {}} - tabs={[ - { - key: ResultsTab.Results, - label: 'Results', - }, - ]} - /> -
    - {editingView ? ( - <> - - updateDataWarehouseSavedQuery({ - id: editingView.id, - query: { - kind: NodeKind.HogQLQuery, - query: queryInput, - }, - }) - } - > - Update - - - ) : ( - onSave()} disabledReason={saveDisabledReason}> - Save - - )} - onQueryInputChange()}> - Run - - -
    -
    -
    - {responseLoading ? ( - - ) : !response ? ( - Query results will appear here - ) : ( -
    - -
    - )} -
    -
    - ) -} diff --git a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts index 8239bd166551d..c7128d138beea 100644 --- a/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts +++ b/frontend/src/scenes/data-warehouse/editor/editorSidebarLogic.ts @@ -2,6 +2,8 @@ import Fuse from 'fuse.js' import { connect, kea, path, selectors } from 'kea' import { router } from 'kea-router' import { subscriptions } from 'kea-subscriptions' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import { sceneLogic } from 'scenes/sceneLogic' import { Scene } from 'scenes/sceneTypes' @@ -14,6 +16,7 @@ import { DatabaseSchemaDataWarehouseTable, DatabaseSchemaTable } from '~/queries import { DataWarehouseSavedQuery, PipelineTab } from '~/types' import { dataWarehouseViewsLogic } from '../saved_queries/dataWarehouseViewsLogic' +import { viewLinkLogic } from '../viewLinkLogic' import { editorSceneLogic } from './editorSceneLogic' import type { editorSidebarLogicType } from './editorSidebarLogicType' import { multitabEditorLogic } from './multitabEditorLogic' @@ -39,6 +42,20 @@ const savedQueriesfuse = new Fuse([], { includeMatches: true, }) +const nonMaterializedViewsfuse = new Fuse([], { + keys: [{ name: 'name', weight: 2 }], + threshold: 0.3, + ignoreLocation: true, + includeMatches: true, +}) + +const materializedViewsfuse = new Fuse([], { + keys: [{ name: 'name', weight: 2 }], + threshold: 0.3, + ignoreLocation: true, + includeMatches: true, +}) + export const editorSidebarLogic = kea([ path(['data-warehouse', 'editor', 'editorSidebarLogic']), connect({ @@ -49,8 +66,17 @@ export const editorSidebarLogic = kea([ ['dataWarehouseSavedQueries', 'dataWarehouseSavedQueryMapById', 'dataWarehouseSavedQueriesLoading'], databaseTableListLogic, ['posthogTables', 'dataWarehouseTables', 'databaseLoading', 'views', 'viewsMapById'], + featureFlagLogic, + ['featureFlags'], + ], + actions: [ + editorSceneLogic, + ['selectSchema'], + dataWarehouseViewsLogic, + ['deleteDataWarehouseSavedQuery', 'runDataWarehouseSavedQuery'], + viewLinkLogic, + ['selectSourceTable', 'toggleJoinTableModal'], ], - actions: [editorSceneLogic, ['selectSchema'], dataWarehouseViewsLogic, ['deleteDataWarehouseSavedQuery']], }), selectors(({ actions }) => ({ contents: [ @@ -60,13 +86,19 @@ export const editorSidebarLogic = kea([ s.relevantPosthogTables, s.relevantDataWarehouseTables, s.databaseLoading, + s.relevantNonMaterializedViews, + s.relevantMaterializedViews, + s.featureFlags, ], ( relevantSavedQueries, dataWarehouseSavedQueriesLoading, relevantPosthogTables, relevantDataWarehouseTables, - databaseLoading + databaseLoading, + relevantNonMaterializedViews, + relevantMaterializedViews, + featureFlags ) => [ { key: 'data-warehouse-sources', @@ -85,6 +117,15 @@ export const editorSidebarLogic = kea([ onClick: () => { actions.selectSchema(table) }, + menuItems: [ + { + label: 'Add join', + onClick: () => { + actions.selectSourceTable(table.name) + actions.toggleJoinTableModal() + }, + }, + ], })), onAdd: () => { router.actions.push(urls.pipeline(PipelineTab.Sources)) @@ -107,13 +148,25 @@ export const editorSidebarLogic = kea([ onClick: () => { actions.selectSchema(table) }, + menuItems: [ + { + label: 'Add join', + onClick: () => { + actions.selectSourceTable(table.name) + actions.toggleJoinTableModal() + }, + }, 
+ ], })), } as SidebarCategory, { key: 'data-warehouse-views', noun: ['view', 'views'], loading: dataWarehouseSavedQueriesLoading, - items: relevantSavedQueries.map(([savedQuery, matches]) => ({ + items: (featureFlags[FEATURE_FLAGS.DATA_MODELING] + ? relevantNonMaterializedViews + : relevantSavedQueries + ).map(([savedQuery, matches]) => ({ key: savedQuery.id, name: savedQuery.name, url: '', @@ -135,6 +188,23 @@ export const editorSidebarLogic = kea([ }).actions.createTab(savedQuery.query.query, savedQuery) }, }, + { + label: 'Add join', + onClick: () => { + actions.selectSourceTable(savedQuery.name) + actions.toggleJoinTableModal() + }, + }, + ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && !savedQuery.status + ? [ + { + label: 'Materialize', + onClick: () => { + actions.runDataWarehouseSavedQuery(savedQuery.id) + }, + }, + ] + : []), { label: 'Delete', status: 'danger', @@ -145,8 +215,77 @@ export const editorSidebarLogic = kea([ ], })), } as SidebarCategory, + ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] + ? [ + { + key: 'data-warehouse-materialized-views', + noun: ['materialized view', 'materialized views'], + loading: dataWarehouseSavedQueriesLoading, + items: relevantMaterializedViews.map(([materializedView, matches]) => ({ + key: materializedView.id, + name: materializedView.name, + url: '', + searchMatch: matches + ? { + matchingFields: matches.map((match) => match.key), + nameHighlightRanges: matches.find((match) => match.key === 'name')?.indices, + } + : null, + onClick: () => { + actions.selectSchema(materializedView) + }, + menuItems: [ + { + label: 'Edit view definition', + onClick: () => { + multitabEditorLogic({ + key: `hogQLQueryEditor/${router.values.location.pathname}`, + }).actions.createTab(materializedView.query.query, materializedView) + }, + }, + { + label: 'Add join', + onClick: () => { + actions.selectSourceTable(materializedView.name) + actions.toggleJoinTableModal() + }, + }, + ...(featureFlags[FEATURE_FLAGS.DATA_MODELING] && materializedView.status + ? 
[ + { + label: 'Run', + onClick: () => { + actions.runDataWarehouseSavedQuery(materializedView.id) + }, + }, + ] + : []), + { + label: 'Delete', + status: 'danger', + onClick: () => { + actions.deleteDataWarehouseSavedQuery(materializedView.id) + }, + }, + ], + })), + }, + ] + : []), ], ], + nonMaterializedViews: [ + (s) => [s.dataWarehouseSavedQueries], + (views): DataWarehouseSavedQuery[] => { + return views.filter((view) => !view.status && !view.last_run_at) + }, + ], + materializedViews: [ + (s) => [s.dataWarehouseSavedQueries], + (views): DataWarehouseSavedQuery[] => { + return views.filter((view) => view.status || view.last_run_at) + }, + ], activeListItemKey: [ (s) => [s.activeScene, s.sceneParams], (activeScene, sceneParams): [string, number] | null => { @@ -188,6 +327,28 @@ export const editorSidebarLogic = kea([ return dataWarehouseSavedQueries.map((savedQuery) => [savedQuery, null]) }, ], + relevantNonMaterializedViews: [ + (s) => [s.nonMaterializedViews, navigation3000Logic.selectors.searchTerm], + (nonMaterializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { + if (searchTerm) { + return nonMaterializedViewsfuse + .search(searchTerm) + .map((result) => [result.item, result.matches as FuseSearchMatch[]]) + } + return nonMaterializedViews.map((view) => [view, null]) + }, + ], + relevantMaterializedViews: [ + (s) => [s.materializedViews, navigation3000Logic.selectors.searchTerm], + (materializedViews, searchTerm): [DataWarehouseSavedQuery, FuseSearchMatch[] | null][] => { + if (searchTerm) { + return materializedViewsfuse + .search(searchTerm) + .map((result) => [result.item, result.matches as FuseSearchMatch[]]) + } + return materializedViews.map((view) => [view, null]) + }, + ], })), subscriptions({ dataWarehouseTables: (dataWarehouseTables) => { diff --git a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx index cad1c656c0b91..c6b9e4ed575c9 100644 --- a/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx +++ b/frontend/src/scenes/data-warehouse/editor/multitabEditorLogic.tsx @@ -295,6 +295,7 @@ export const multitabEditorLogic = kea([ kind: NodeKind.HogQLQuery, query: queryOverride || values.queryInput, }, + alwaysRefresh: true, }).actions.loadData(true) } actions.setActiveQuery(queryOverride || values.queryInput) @@ -322,7 +323,18 @@ export const multitabEditorLogic = kea([ kind: NodeKind.HogQLQuery, query: values.queryInput, } - await dataWarehouseViewsLogic.asyncActions.createDataWarehouseSavedQuery({ name, query }) + + const logic = dataNodeLogic({ + key: values.activeTabKey, + query: { + kind: NodeKind.HogQLQuery, + query: values.queryInput, + }, + }) + + const types = logic.values.response?.types ?? 
[] + + await dataWarehouseViewsLogic.asyncActions.createDataWarehouseSavedQuery({ name, query, types }) }, reloadMetadata: async (_, breakpoint) => { const model = props.editor?.getModel() diff --git a/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts new file mode 100644 index 0000000000000..659c79b440635 --- /dev/null +++ b/frontend/src/scenes/data-warehouse/editor/outputPaneLogic.ts @@ -0,0 +1,23 @@ +import { actions, kea, path, reducers } from 'kea' + +import type { outputPaneLogicType } from './outputPaneLogicType' + +export enum OutputTab { + Results = 'results', + Visualization = 'visualization', +} + +export const outputPaneLogic = kea([ + path(['data-warehouse', 'editor', 'outputPaneLogic']), + actions({ + setActiveTab: (tab: OutputTab) => ({ tab }), + }), + reducers({ + activeTab: [ + OutputTab.Results as OutputTab, + { + setActiveTab: (_, { tab }) => tab, + }, + ], + }), +]) diff --git a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx index 37c744e633d9b..d66a0285526ba 100644 --- a/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx +++ b/frontend/src/scenes/data-warehouse/saved_queries/dataWarehouseViewsLogic.tsx @@ -35,7 +35,9 @@ export const dataWarehouseViewsLogic = kea([ } return savedQueries.results }, - createDataWarehouseSavedQuery: async (view: Partial) => { + createDataWarehouseSavedQuery: async ( + view: Partial & { types: string[][] } + ) => { const newView = await api.dataWarehouseSavedQueries.create(view) lemonToast.success(`${newView.name ?? 'View'} successfully created`) @@ -46,7 +48,9 @@ export const dataWarehouseViewsLogic = kea([ await api.dataWarehouseSavedQueries.delete(viewId) return values.dataWarehouseSavedQueries.filter((view) => view.id !== viewId) }, - updateDataWarehouseSavedQuery: async (view: Partial & { id: string }) => { + updateDataWarehouseSavedQuery: async ( + view: Partial & { id: string; types: string[][] } + ) => { const newView = await api.dataWarehouseSavedQueries.update(view.id, view) return values.dataWarehouseSavedQueries.map((savedQuery) => { if (savedQuery.id === view.id) { diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSceneLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSceneLogic.ts index 85e8283b57fad..c3f05d5562b76 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSceneLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSceneLogic.ts @@ -11,6 +11,7 @@ import { urls } from 'scenes/urls' import { DatabaseSchemaMaterializedViewTable, DatabaseSchemaTable, + DatabaseSchemaViewTable, DatabaseSerializedFieldType, HogQLQuery, NodeKind, @@ -55,7 +56,7 @@ export const dataWarehouseSceneLogic = kea([ deleteDataWarehouseTable: (tableId: string) => ({ tableId }), toggleSchemaModal: true, setEditingView: (id: string | null) => ({ id }), - updateView: (query: string) => ({ query }), + updateView: (query: string, types: string[][]) => ({ query, types }), })), reducers({ selectedRow: [ @@ -278,16 +279,17 @@ export const dataWarehouseSceneLogic = kea([ }) } }, - updateView: ({ query }) => { + updateView: ({ query, types }) => { if (values.editingView) { const newViewQuery: HogQLQuery = { kind: NodeKind.HogQLQuery, query: query, } const oldView = values.viewsMapById[values.editingView] - const newView = { + const newView: DatabaseSchemaViewTable & { types: 
string[][] } = { ...oldView, query: newViewQuery, + types, } actions.updateDataWarehouseSavedQuery(newView) } diff --git a/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx b/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx index b55875358c7ed..9d2a7cd171d5b 100644 --- a/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx +++ b/frontend/src/scenes/data-warehouse/viewLinkLogic.tsx @@ -41,6 +41,8 @@ export const viewLinkLogic = kea([ deleteViewLink: (table, column) => ({ table, column }), setError: (error: string) => ({ error }), setFieldName: (fieldName: string) => ({ fieldName }), + setExperimentsOptimized: (experimentsOptimized: boolean) => ({ experimentsOptimized }), + selectExperimentsTimestampKey: (experimentsTimestampKey: string | null) => ({ experimentsTimestampKey }), clearModalFields: true, })), reducers({ @@ -101,6 +103,22 @@ export const viewLinkLogic = kea([ clearModalFields: () => '', }, ], + experimentsOptimized: [ + false as boolean, + { + setExperimentsOptimized: (_, { experimentsOptimized }) => experimentsOptimized, + toggleEditJoinModal: (_, { join }) => join.configuration?.experiments_optimized ?? false, + clearModalFields: () => false, + }, + ], + experimentsTimestampKey: [ + null as string | null, + { + selectExperimentsTimestampKey: (_, { experimentsTimestampKey }) => experimentsTimestampKey, + toggleEditJoinModal: (_, { join }) => join.configuration?.experiments_timestamp_key ?? null, + clearModalFields: () => null, + }, + ], isJoinTableModalOpen: [ false, { @@ -136,6 +154,10 @@ export const viewLinkLogic = kea([ joining_table_name, joining_table_key: values.selectedJoiningKey ?? undefined, field_name: values.fieldName, + configuration: { + experiments_optimized: values.experimentsOptimized, + experiments_timestamp_key: values.experimentsTimestampKey ?? undefined, + }, }) actions.toggleJoinTableModal() @@ -156,6 +178,10 @@ export const viewLinkLogic = kea([ joining_table_name, joining_table_key: values.selectedJoiningKey ?? undefined, field_name: values.fieldName, + configuration: { + experiments_optimized: values.experimentsOptimized, + experiments_timestamp_key: values.experimentsTimestampKey ?? undefined, + }, }) actions.toggleJoinTableModal() @@ -175,6 +201,16 @@ export const viewLinkLogic = kea([ toggleEditJoinModal: ({ join }) => { actions.setViewLinkValues(join) }, + setExperimentsOptimized: ({ experimentsOptimized }) => { + if (!experimentsOptimized) { + actions.selectExperimentsTimestampKey(null) + } + }, + selectExperimentsTimestampKey: ({ experimentsTimestampKey }) => { + if (experimentsTimestampKey) { + actions.setExperimentsOptimized(true) + } + }, })), selectors({ selectedSourceTable: [ diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx index a7e105958f356..d381d62640a0f 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingConfigurationScene.tsx @@ -18,6 +18,11 @@ export const scene: SceneExport = { export function ErrorTrackingConfigurationScene(): JSX.Element { const { missingSymbolSets, validSymbolSets } = useValues(errorTrackingSymbolSetLogic) + const { loadSymbolSets } = useActions(errorTrackingSymbolSetLogic) + + useEffect(() => { + loadSymbolSets() + }, [loadSymbolSets]) return (
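For context on the editorSidebarLogic changes earlier in this diff: saved queries are now split into non-materialized and materialized views (a query counts as materialized once it has a `status` or a `last_run_at`), each group getting its own Fuse index so sidebar search can return matches per category, with the split only rendered when `FEATURE_FLAGS.DATA_MODELING` is enabled. A minimal standalone sketch of that partition-plus-search pattern follows; `SavedQueryish` and `FuseMatch` are placeholder types, and keeping the indexes in sync is assumed to happen elsewhere (the real logic uses kea subscriptions).

```ts
import Fuse from 'fuse.js'

// Placeholder shape; the real DataWarehouseSavedQuery carries more fields than this.
interface SavedQueryish {
    id: string
    name: string
    status?: string | null
    last_run_at?: string | null
}

type FuseMatch = { key?: string; indices: readonly [number, number][] }

const fuseOptions = {
    keys: [{ name: 'name', weight: 2 }],
    threshold: 0.3,
    ignoreLocation: true,
    includeMatches: true,
}

// One index per sidebar category, matching the two new Fuse instances in the diff.
const nonMaterializedViewsFuse = new Fuse<SavedQueryish>([], fuseOptions)
const materializedViewsFuse = new Fuse<SavedQueryish>([], fuseOptions)

// A saved query counts as materialized once it has a status or has ever been run.
const isMaterialized = (view: SavedQueryish): boolean => Boolean(view.status || view.last_run_at)

// Keep both indexes in sync with the full list (the real logic does this via kea subscriptions).
function indexSavedQueries(savedQueries: SavedQueryish[]): void {
    nonMaterializedViewsFuse.setCollection(savedQueries.filter((view) => !isMaterialized(view)))
    materializedViewsFuse.setCollection(savedQueries.filter(isMaterialized))
}

// Mirrors the relevant*Views selectors: match metadata when searching, the plain list otherwise.
function searchViews(
    fuse: Fuse<SavedQueryish>,
    allViews: SavedQueryish[],
    searchTerm: string | null
): [SavedQueryish, FuseMatch[] | null][] {
    if (searchTerm) {
        return fuse
            .search(searchTerm)
            .map((result): [SavedQueryish, FuseMatch[] | null] => [
                result.item,
                (result.matches as FuseMatch[] | undefined) ?? null,
            ])
    }
    return allViews.map((view): [SavedQueryish, FuseMatch[] | null] => [view, null])
}
```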
    @@ -27,18 +32,22 @@ export function ErrorTrackingConfigurationScene(): JSX.Element { automatically retrieves source maps where possible. Cases where it was not possible are listed below. Source maps can be uploaded retroactively but changes will only apply to all future exceptions ingested.

    - {missingSymbolSets.length > 0 && } - {validSymbolSets.length > 0 && } + {missingSymbolSets.length > 0 && ( + + )} + {validSymbolSets.length > 0 && }
    ) } const SymbolSetTable = ({ + id, dataSource, pageSize, missing, }: { + id: string dataSource: ErrorTrackingSymbolSet[] pageSize: number missing?: boolean @@ -98,6 +107,7 @@ const SymbolSetTable = ({ return ( { export const Options = ({ isGroup = false }: { isGroup?: boolean }): JSX.Element => { const { dateRange, assignee, hasGroupActions } = useValues(errorTrackingLogic) const { setDateRange, setAssignee } = useActions(errorTrackingLogic) - const { order } = useValues(errorTrackingSceneLogic) - const { setOrder } = useActions(errorTrackingSceneLogic) + const { orderBy } = useValues(errorTrackingSceneLogic) + const { setOrderBy } = useActions(errorTrackingSceneLogic) return (
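The Options component above now reads `orderBy` / `setOrderBy` from `errorTrackingSceneLogic` instead of `order` / `setOrder`; the same rename carries through the persisted reducer and into `errorTrackingQuery` further down in this diff, so the sort key reaches the `ErrorTrackingQuery` source under its new name. A stripped-down sketch of that flow, assuming an illustrative `OrderBy` union (the real type is `ErrorTrackingQuery['orderBy']` from the query schema):

```ts
// Illustrative union; the real type is ErrorTrackingQuery['orderBy'] from the query schema.
type OrderBy = 'last_seen' | 'first_seen' | 'occurrences' | 'sessions' | 'users'

interface ErrorTrackingSourceish {
    kind: 'ErrorTrackingQuery'
    orderBy: OrderBy
    select: string[]
}

// errorTrackingSceneLogic persists orderBy (default 'last_seen'); the query builder now accepts
// `orderBy` instead of `order` and forwards it unchanged to the ErrorTrackingQuery source.
function buildErrorTrackingSource(orderBy: OrderBy, select: string[]): ErrorTrackingSourceish {
    return { kind: 'ErrorTrackingQuery', orderBy, select }
}

// Example: the default sort used by the scene logic.
buildErrorTrackingSource('last_seen', ['occurrences', 'sessions', 'users'])
```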
    @@ -110,9 +110,9 @@ export const Options = ({ isGroup = false }: { isGroup?: boolean }): JSX.Element
    Sort by: { } className="flex-1" to={urls.errorTrackingIssue(record.id)} + onClick={() => { + const issueLogic = errorTrackingIssueSceneLogic({ id: record.id }) + issueLogic.mount() + issueLogic.actions.setIssue(record) + }} />
    ) diff --git a/frontend/src/scenes/error-tracking/errorTrackingIssueSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingIssueSceneLogic.ts index 0979ba8037da1..08f2de773227b 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingIssueSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingIssueSceneLogic.ts @@ -7,7 +7,7 @@ import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { ErrorTrackingIssue } from '~/queries/schema' -import { Breadcrumb } from '~/types' +import { Breadcrumb, EventType } from '~/types' import type { errorTrackingIssueSceneLogicType } from './errorTrackingIssueSceneLogicType' import { errorTrackingLogic } from './errorTrackingLogic' @@ -16,7 +16,7 @@ import { errorTrackingIssueEventsQuery, errorTrackingIssueQuery } from './querie export interface ErrorTrackingEvent { uuid: string timestamp: Dayjs - properties: Record + properties: EventType['properties'] person: { distinct_id: string uuid?: string @@ -46,6 +46,7 @@ export const errorTrackingIssueSceneLogic = kea ({ tab }), setActiveEventUUID: (uuid: ErrorTrackingEvent['uuid']) => ({ uuid }), + setIssue: (issue: ErrorTrackingIssue) => ({ issue }), updateIssue: (issue: Partial>) => ({ issue }), }), @@ -89,6 +90,7 @@ export const errorTrackingIssueSceneLogic = kea issue, }, ], events: [ diff --git a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts index e1128d177e0de..bd36ead868256 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingSceneLogic.ts @@ -26,16 +26,16 @@ export const errorTrackingSceneLogic = kea([ }), actions({ - setOrder: (order: ErrorTrackingQuery['order']) => ({ order }), + setOrderBy: (orderBy: ErrorTrackingQuery['orderBy']) => ({ orderBy }), setSelectedIssueIds: (ids: string[]) => ({ ids }), }), reducers({ - order: [ - 'last_seen' as ErrorTrackingQuery['order'], + orderBy: [ + 'last_seen' as ErrorTrackingQuery['orderBy'], { persist: true }, { - setOrder: (_, { order }) => order, + setOrderBy: (_, { orderBy }) => orderBy, }, ], selectedIssueIds: [ @@ -49,7 +49,7 @@ export const errorTrackingSceneLogic = kea([ selectors({ query: [ (s) => [ - s.order, + s.orderBy, s.dateRange, s.assignee, s.filterTestAccounts, @@ -59,7 +59,7 @@ export const errorTrackingSceneLogic = kea([ s.hasGroupActions, ], ( - order, + orderBy, dateRange, assignee, filterTestAccounts, @@ -69,7 +69,7 @@ export const errorTrackingSceneLogic = kea([ hasGroupActions ): DataTableNode => errorTrackingQuery({ - order, + orderBy, dateRange, assignee, filterTestAccounts, diff --git a/frontend/src/scenes/error-tracking/errorTrackingSymbolSetLogic.tsx b/frontend/src/scenes/error-tracking/errorTrackingSymbolSetLogic.tsx index f52e94729b13a..e1c7cb57173cd 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingSymbolSetLogic.tsx +++ b/frontend/src/scenes/error-tracking/errorTrackingSymbolSetLogic.tsx @@ -1,5 +1,5 @@ import { lemonToast } from '@posthog/lemon-ui' -import { actions, afterMount, kea, path, reducers, selectors } from 'kea' +import { actions, kea, path, reducers, selectors } from 'kea' import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import api from 'lib/api' @@ -103,8 +103,4 @@ export const errorTrackingSymbolSetLogic = kea( }, }, })), - - afterMount(({ actions }) => { - actions.loadSymbolSets() - }), ]) diff --git a/frontend/src/scenes/error-tracking/groups/OverviewTab.tsx 
b/frontend/src/scenes/error-tracking/groups/OverviewTab.tsx index 883195c5001e4..d449562c41281 100644 --- a/frontend/src/scenes/error-tracking/groups/OverviewTab.tsx +++ b/frontend/src/scenes/error-tracking/groups/OverviewTab.tsx @@ -1,12 +1,10 @@ import { PersonDisplay, TZLabel } from '@posthog/apps-common' -import { LemonButton } from '@posthog/lemon-ui' import clsx from 'clsx' import { useActions, useValues } from 'kea' import { EmptyMessage } from 'lib/components/EmptyMessage/EmptyMessage' import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay' import { Playlist } from 'lib/components/Playlist/Playlist' -import { dayjs } from 'lib/dayjs' -import { sessionPlayerModalLogic } from 'scenes/session-recordings/player/modal/sessionPlayerModalLogic' +import ViewRecordingButton, { mightHaveRecording } from 'lib/components/ViewRecordingButton' import { PropertyIcons } from 'scenes/session-recordings/playlist/SessionRecordingPreview' import { ErrorTrackingEvent, errorTrackingIssueSceneLogic } from '../errorTrackingIssueSceneLogic' @@ -38,7 +36,16 @@ export const OverviewTab = (): JSX.Element => { event ? (
    - +
    @@ -62,25 +69,6 @@ export const OverviewTab = (): JSX.Element => { ) } -const ViewSessionButton = ({ event }: { event: ErrorTrackingEvent }): JSX.Element | null => { - const { openSessionPlayer } = useActions(sessionPlayerModalLogic) - - const sessionId = event.properties.$session_id - - return ( - { - const fiveSecondsBeforeEvent = dayjs(event.timestamp).valueOf() - 5000 - openSessionPlayer({ id: sessionId }, Math.max(fiveSecondsBeforeEvent, 0)) - }} - disabledReason={!sessionId ? 'There was no Session ID associated with this exception' : undefined} - > - View recording - - ) -} - const ListItemException = ({ item: { timestamp, properties, person }, isActive, diff --git a/frontend/src/scenes/error-tracking/queries.ts b/frontend/src/scenes/error-tracking/queries.ts index 0fdf5b3b7fcb8..781c8d75fa4e6 100644 --- a/frontend/src/scenes/error-tracking/queries.ts +++ b/frontend/src/scenes/error-tracking/queries.ts @@ -40,7 +40,7 @@ const toStartOfIntervalFn = { } export const errorTrackingQuery = ({ - order, + orderBy, dateRange, assignee, filterTestAccounts, @@ -49,7 +49,7 @@ export const errorTrackingQuery = ({ sparklineSelectedPeriod, columns, limit = 50, -}: Pick & { +}: Pick & { filterGroup: UniversalFiltersGroup sparklineSelectedPeriod: string | null columns: ('error' | 'volume' | 'occurrences' | 'sessions' | 'users' | 'assignee')[] @@ -69,7 +69,7 @@ export const errorTrackingQuery = ({ source: { kind: NodeKind.ErrorTrackingQuery, select: select, - order: order, + orderBy: orderBy, dateRange: dateRange, assignee: assignee, filterGroup: filterGroup as PropertyGroupFilter, @@ -157,7 +157,7 @@ export const errorTrackingIssueEventsQuery = ({ // TODO: fix this where clause. It does not take into account the events // associated with issues that have been merged into this primary issue - const where = [`eq(${issueId}, properties.$exception_issue_id)`] + const where = [`'${issueId}' == properties.$exception_issue_id`] const query: EventsQuery = { kind: NodeKind.EventsQuery, diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index f71d0bb3c0b73..d1fd0b140bc4d 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -5,7 +5,7 @@ import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' -import { FEATURE_FLAGS, MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' @@ -14,17 +14,14 @@ import { capitalizeFirstLetter } from 'lib/utils' import { experimentsLogic } from 'scenes/experiments/experimentsLogic' import { experimentLogic } from './experimentLogic' -import { ExperimentsDisabledBanner } from './Experiments' const ExperimentFormFields = (): JSX.Element => { - const { experiment, featureFlags, groupTypes, aggregationLabel, dynamicFeatureFlagKey } = useValues(experimentLogic) + const { experiment, groupTypes, aggregationLabel, dynamicFeatureFlagKey } = useValues(experimentLogic) const { addExperimentGroup, removeExperimentGroup, setExperiment, createExperiment, setExperimentType } = useActions(experimentLogic) const { webExperimentsAvailable } = useValues(experimentsLogic) - return 
featureFlags[FEATURE_FLAGS.EXPERIMENTS_MIGRATION_DISABLE_UI] ? ( - - ) : ( + return (
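Just above, `errorTrackingIssueEventsQuery` in queries.ts switches its where clause from `eq(${issueId}, properties.$exception_issue_id)` to interpolating the issue id as a quoted string literal compared with `==`, presumably because the id is a UUID and the unquoted interpolation does not read as a HogQL string. A trimmed-down sketch of the resulting query shape, with hypothetical select and orderBy fields:

```ts
// Trimmed-down sketch of the updated issue-events query; select/orderBy here are hypothetical,
// the point is the quoted interpolation of the UUID in the where clause.
interface EventsQueryish {
    kind: 'EventsQuery'
    select: string[]
    where: string[]
    orderBy: string[]
}

export function issueEventsQuerySketch(issueId: string): EventsQueryish {
    return {
        kind: 'EventsQuery',
        select: ['uuid', 'properties', 'timestamp', 'person'],
        // As noted in the diff, events from issues merged into this one are not yet covered here.
        where: [`'${issueId}' == properties.$exception_issue_id`],
        orderBy: ['timestamp DESC'],
    }
}
```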
    diff --git a/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx b/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx new file mode 100644 index 0000000000000..77a7b9d0359b3 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/DeltaViz.tsx @@ -0,0 +1,449 @@ +import { useValues } from 'kea' +import { useEffect, useRef, useState } from 'react' + +import { InsightType } from '~/types' + +import { experimentLogic } from '../experimentLogic' +import { VariantTag } from './components' + +const BAR_HEIGHT = 8 +const BAR_PADDING = 10 +const TICK_PANEL_HEIGHT = 20 +const VIEW_BOX_WIDTH = 800 +const HORIZONTAL_PADDING = 20 +const CONVERSION_RATE_RECT_WIDTH = 2 +const TICK_FONT_SIZE = 7 + +const COLORS = { + BOUNDARY_LINES: '#d0d0d0', + ZERO_LINE: '#666666', + BAR_NEGATIVE: '#F44435', + BAR_BEST: '#4DAF4F', + BAR_DEFAULT: '#d9d9d9', + BAR_CONTROL: 'rgba(217, 217, 217, 0.4)', + BAR_MIDDLE_POINT: 'black', + BAR_MIDDLE_POINT_CONTROL: 'rgba(0, 0, 0, 0.4)', +} + +// Helper function to find nice round numbers for ticks +export function getNiceTickValues(maxAbsValue: number): number[] { + // Round up maxAbsValue to ensure we cover all values + maxAbsValue = Math.ceil(maxAbsValue * 10) / 10 + + const magnitude = Math.floor(Math.log10(maxAbsValue)) + const power = Math.pow(10, magnitude) + + let baseUnit + const normalizedMax = maxAbsValue / power + if (normalizedMax <= 1) { + baseUnit = 0.2 * power + } else if (normalizedMax <= 2) { + baseUnit = 0.5 * power + } else if (normalizedMax <= 5) { + baseUnit = 1 * power + } else { + baseUnit = 2 * power + } + + // Calculate how many baseUnits we need to exceed maxAbsValue + const unitsNeeded = Math.ceil(maxAbsValue / baseUnit) + + // Determine appropriate number of decimal places based on magnitude + const decimalPlaces = Math.max(0, -magnitude + 1) + + const ticks: number[] = [] + for (let i = -unitsNeeded; i <= unitsNeeded; i++) { + // Round each tick value to avoid floating point precision issues + const tickValue = Number((baseUnit * i).toFixed(decimalPlaces)) + ticks.push(tickValue) + } + return ticks +} + +function formatTickValue(value: number): string { + if (value === 0) { + return '0%' + } + + // Determine number of decimal places needed + const absValue = Math.abs(value) + let decimals = 0 + + if (absValue < 0.01) { + decimals = 3 + } else if (absValue < 0.1) { + decimals = 2 + } else if (absValue < 1) { + decimals = 1 + } else { + decimals = 0 + } + + return `${(value * 100).toFixed(decimals)}%` +} + +export function DeltaViz(): JSX.Element { + const { experiment, experimentResults, getMetricType, metricResults } = useValues(experimentLogic) + + if (!experimentResults) { + return <> + } + + const variants = experiment.parameters.feature_flag_variants + const allResults = [...(metricResults || [])] + + return ( +
    +
    + {allResults.map((results, metricIndex) => { + if (!results) { + return null + } + + const isFirstMetric = metricIndex === 0 + + return ( +
    + +
    + ) + })} +
    +
    + ) +} + +function Chart({ + results, + variants, + metricType, + isFirstMetric, +}: { + results: any + variants: any[] + metricType: InsightType + isFirstMetric: boolean +}): JSX.Element { + const { credibleIntervalForVariant, conversionRateForVariant, experimentId } = useValues(experimentLogic) + const [tooltipData, setTooltipData] = useState<{ x: number; y: number; variant: string } | null>(null) + + // Update chart height calculation to include only one BAR_PADDING for each space between bars + const chartHeight = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * variants.length + + // Find the maximum absolute value from all credible intervals + const maxAbsValue = Math.max( + ...variants.flatMap((variant) => { + const interval = credibleIntervalForVariant(results, variant.key, metricType) + return interval ? [Math.abs(interval[0] / 100), Math.abs(interval[1] / 100)] : [] + }) + ) + + // Add padding to the range + const padding = Math.max(maxAbsValue * 0.05, 0.02) + const chartBound = maxAbsValue + padding + + const tickValues = getNiceTickValues(chartBound) + const maxTick = Math.max(...tickValues) + + const valueToX = (value: number): number => { + // Scale the value to fit within the padded area + const percentage = (value / maxTick + 1) / 2 + return HORIZONTAL_PADDING + percentage * (VIEW_BOX_WIDTH - 2 * HORIZONTAL_PADDING) + } + + const infoPanelWidth = '10%' + + const ticksSvgRef = useRef(null) + const chartSvgRef = useRef(null) + // :TRICKY: We need to track SVG heights dynamically because + // we're fitting regular divs to match SVG viewports. SVGs scale + // based on their viewBox and the viewport size, making it challenging + // to match their effective rendered heights with regular div elements. + const [ticksSvgHeight, setTicksSvgHeight] = useState(0) + const [chartSvgHeight, setChartSvgHeight] = useState(0) + + useEffect(() => { + const ticksSvg = ticksSvgRef.current + const chartSvg = chartSvgRef.current + + // eslint-disable-next-line compat/compat + const resizeObserver = new ResizeObserver((entries) => { + for (const entry of entries) { + if (entry.target === ticksSvg) { + setTicksSvgHeight(entry.contentRect.height) + } else if (entry.target === chartSvg) { + setChartSvgHeight(entry.contentRect.height) + } + } + }) + + if (ticksSvg) { + resizeObserver.observe(ticksSvg) + } + if (chartSvg) { + resizeObserver.observe(chartSvg) + } + + return () => { + resizeObserver.disconnect() + } + }, []) + + return ( +
    + {/* eslint-disable-next-line react/forbid-dom-props */} +
    + {isFirstMetric && ( + + )} + {isFirstMetric &&
    } + {/* eslint-disable-next-line react/forbid-dom-props */} +
    + {variants.map((variant) => ( +
    + +
    + ))} +
    +
    + + {/* SVGs container */} +
    + {/* Ticks */} + {isFirstMetric && ( + + {tickValues.map((value, index) => { + const x = valueToX(value) + return ( + + + {formatTickValue(value)} + + + ) + })} + + )} + {isFirstMetric &&
    } + {/* Chart */} + + {/* Vertical grid lines */} + {tickValues.map((value, index) => { + const x = valueToX(value) + return ( + + ) + })} + + {variants.map((variant, index) => { + const interval = credibleIntervalForVariant(results, variant.key, metricType) + const [lower, upper] = interval ? [interval[0] / 100, interval[1] / 100] : [0, 0] + + const variantRate = conversionRateForVariant(results, variant.key) + const controlRate = conversionRateForVariant(results, 'control') + const delta = variantRate && controlRate ? (variantRate - controlRate) / controlRate : 0 + + // Find the highest delta among all variants + const maxDelta = Math.max( + ...variants.map((v) => { + const vRate = conversionRateForVariant(results, v.key) + return vRate && controlRate ? (vRate - controlRate) / controlRate : 0 + }) + ) + + let barColor + if (variant.key === 'control') { + barColor = COLORS.BAR_DEFAULT + } else if (delta < 0) { + barColor = COLORS.BAR_NEGATIVE + } else if (delta === maxDelta) { + barColor = COLORS.BAR_BEST + } else { + barColor = COLORS.BAR_DEFAULT + } + + const y = BAR_PADDING + (BAR_HEIGHT + BAR_PADDING) * index + const x1 = valueToX(lower) + const x2 = valueToX(upper) + const deltaX = valueToX(delta) + + return ( + { + const rect = e.currentTarget.getBoundingClientRect() + setTooltipData({ + x: rect.left + rect.width / 2, + y: rect.top - 10, + variant: variant.key, + }) + }} + onMouseLeave={() => setTooltipData(null)} + > + {/* Invisible full-width rect to ensure consistent hover */} + + {/* Visible elements */} + + + + ) + })} + + + {/* Tooltip */} + {tooltipData && ( +
    +
    + +
    + Conversion rate: + + {conversionRateForVariant(results, tooltipData.variant)?.toFixed(2)}% + +
    +
    + Delta: + + {tooltipData.variant === 'control' ? ( + Baseline + ) : ( + (() => { + const variantRate = conversionRateForVariant(results, tooltipData.variant) + const controlRate = conversionRateForVariant(results, 'control') + const delta = + variantRate && controlRate + ? (variantRate - controlRate) / controlRate + : 0 + return delta ? ( + 0 ? 'text-success' : 'text-danger'}> + {`${delta > 0 ? '+' : ''}${(delta * 100).toFixed(2)}%`} + + ) : ( + '—' + ) + })() + )} + +
    +
    + Credible interval: + + {(() => { + const interval = credibleIntervalForVariant( + results, + tooltipData.variant, + metricType + ) + const [lower, upper] = interval + ? [interval[0] / 100, interval[1] / 100] + : [0, 0] + return `[${lower > 0 ? '+' : ''}${(lower * 100).toFixed(2)}%, ${ + upper > 0 ? '+' : '' + }${(upper * 100).toFixed(2)}%]` + })()} + +
    +
    +
    + )} +
    +
    + ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx index 446ee2fdf4500..8225391583fc9 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx @@ -2,13 +2,11 @@ import '../Experiment.scss' import { LemonDivider, LemonTabs } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' import { PostHogFeature } from 'posthog-js/react' import { WebExperimentImplementationDetails } from 'scenes/experiments/WebExperimentImplementationDetails' import { ExperimentImplementationDetails } from '../ExperimentImplementationDetails' import { experimentLogic } from '../experimentLogic' -import { ExperimentsDisabledBanner } from '../Experiments' import { ExperimentLoadingAnimation, LoadingState, @@ -69,16 +67,14 @@ const VariantsTab = (): JSX.Element => { } export function ExperimentView(): JSX.Element { - const { experimentLoading, experimentResultsLoading, experimentId, experimentResults, tabKey, featureFlags } = + const { experimentLoading, experimentResultsLoading, experimentId, experimentResults, tabKey } = useValues(experimentLogic) const { setTabKey } = useActions(experimentLogic) const hasResultsInsight = experimentResults && experimentResults.insight - return featureFlags[FEATURE_FLAGS.EXPERIMENTS_MIGRATION_DISABLE_UI] ? ( - - ) : ( + return ( <>
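The DeltaViz chart added above derives its x-axis from `getNiceTickValues`: the largest absolute credible-interval value is rounded up to one decimal, its order of magnitude is taken with `log10`, a "nice" base unit of 0.2, 0.5, 1, or 2 times that power of ten is chosen depending on where the normalized maximum falls, and ticks are then emitted symmetrically around zero, rounded to a magnitude-appropriate number of decimals to avoid floating-point noise. The expected outputs below come from the utils.test.ts cases added later in this diff (import path as used by that test file):

```ts
import { getNiceTickValues } from './ExperimentView/DeltaViz'

// Expected outputs, copied from the utils.test.ts cases in this diff:
getNiceTickValues(0.08) // [-0.1, -0.08, -0.06, -0.04, -0.02, 0, 0.02, 0.04, 0.06, 0.08, 0.1]
getNiceTickValues(0.45) // [-0.5, -0.4, -0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3, 0.4, 0.5]
getNiceTickValues(4.7) // [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
getNiceTickValues(8.5) // [-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]
```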
    diff --git a/frontend/src/scenes/experiments/ExperimentView/Results.tsx b/frontend/src/scenes/experiments/ExperimentView/Results.tsx index 0afe6482ae6bb..1f34f96fd7518 100644 --- a/frontend/src/scenes/experiments/ExperimentView/Results.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/Results.tsx @@ -1,18 +1,21 @@ import '../Experiment.scss' import { useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' import { experimentLogic } from '../experimentLogic' import { ResultsHeader, ResultsQuery } from './components' +import { DeltaViz } from './DeltaViz' import { SummaryTable } from './SummaryTable' export function Results(): JSX.Element { - const { experimentResults } = useValues(experimentLogic) + const { experimentResults, featureFlags } = useValues(experimentLogic) return (
    + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS] && }
    ) diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index fd8751da6c2b6..c24376ac7e67c 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -56,9 +56,13 @@ import { getExperimentInsightColour, transformResultFilters } from '../utils' export function VariantTag({ experimentId, variantKey, + muted = false, + fontSize, }: { experimentId: ExperimentIdType variantKey: string + muted?: boolean + fontSize?: number }): JSX.Element { const { experiment, experimentResults, getIndexForVariant } = useValues(experimentLogic({ experimentId })) @@ -86,7 +90,13 @@ export function VariantTag({ backgroundColor: getExperimentInsightColour(getIndexForVariant(experimentResults, variantKey)), }} /> - {variantKey} + + {variantKey} + ) } diff --git a/frontend/src/scenes/experiments/Experiments.tsx b/frontend/src/scenes/experiments/Experiments.tsx index 6cd77e394393b..26c84171c6a8c 100644 --- a/frontend/src/scenes/experiments/Experiments.tsx +++ b/frontend/src/scenes/experiments/Experiments.tsx @@ -1,11 +1,10 @@ import { LemonDialog, LemonInput, LemonSelect } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { router } from 'kea-router' -import { DetectiveHog, ExperimentsHog } from 'lib/components/hedgehogs' +import { ExperimentsHog } from 'lib/components/hedgehogs' import { MemberSelect } from 'lib/components/MemberSelect' import { PageHeader } from 'lib/components/PageHeader' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' -import { FEATURE_FLAGS } from 'lib/constants' import { dayjs } from 'lib/dayjs' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { More } from 'lib/lemon-ui/LemonButton/More' @@ -16,7 +15,6 @@ import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { Link } from 'lib/lemon-ui/Link' import stringWithWBR from 'lib/utils/stringWithWBR' -import posthog from 'posthog-js' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' @@ -31,44 +29,9 @@ export const scene: SceneExport = { logic: experimentsLogic, } -export const ExperimentsDisabledBanner = (): JSX.Element => { - const payload = posthog.getFeatureFlagPayload(FEATURE_FLAGS.EXPERIMENTS_MIGRATION_DISABLE_UI) - - return ( -
    -
    -
    -
    - -
    -
    -
    -

    We'll be right back!

    -

    - We’re upgrading experiments to a new schema to make them faster, more reliable, and ready for - future improvements. -

    -

    - We expect to be done by {payload}. Thanks for your - patience! -

    -
    -
    -
    - ) -} - export function Experiments(): JSX.Element { - const { - filteredExperiments, - experimentsLoading, - tab, - searchTerm, - shouldShowEmptyState, - searchStatus, - userFilter, - featureFlags, - } = useValues(experimentsLogic) + const { filteredExperiments, experimentsLoading, tab, searchTerm, shouldShowEmptyState, searchStatus, userFilter } = + useValues(experimentsLogic) const { setExperimentsTab, deleteExperiment, archiveExperiment, setSearchStatus, setSearchTerm, setUserFilter } = useActions(experimentsLogic) @@ -217,9 +180,7 @@ export function Experiments(): JSX.Element { }, ] - return featureFlags[FEATURE_FLAGS.EXPERIMENTS_MIGRATION_DISABLE_UI] ? ( - - ) : ( + return (
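When `FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS` is enabled, Results.tsx above renders the new DeltaViz, and the experimentLogic hunks that follow add a `metricResults` loader to feed it: every metric query is tagged with the experiment id, run through `performQuery`, and a failing metric degrades to an empty object rather than failing the whole batch. A rough, self-contained sketch of that loop (the wrapper function and its parameter types are illustrative; `performQuery` is passed in rather than imported):

```ts
// Rough sketch of the per-metric results loading added to experimentLogic further down in this
// diff. Names such as `experiment_id`, `fakeInsightId`, and the performQuery call shape follow
// the diff; the wrapper function itself is illustrative.
async function loadAllMetricResults(
    metrics: Record<string, any>[],
    experimentId: number | string,
    performQuery: (query: Record<string, any>, ctx?: unknown, refresh?: boolean) => Promise<Record<string, any>>,
    refresh?: boolean
): Promise<Record<string, any>[]> {
    return Promise.all(
        metrics.map(async (metric) => {
            try {
                // Queries are shareable, so experiment_id tells the backend which experiment
                // the query belongs to.
                const queryWithExperimentId = { ...metric, experiment_id: experimentId }
                const response = await performQuery(queryWithExperimentId, undefined, refresh)
                return { ...response, fakeInsightId: Math.random().toString(36).substring(2, 15) }
            } catch {
                // One failing metric should not take down the whole batch.
                return {}
            }
        })
    )
}
```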
    ([ path((key) => ['scenes', 'experiment', 'experimentLogic', key]), connect(() => ({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], groupsModel, ['aggregationLabel', 'groupTypes', 'showGroupsOptions'], sceneLogic, @@ -232,7 +232,7 @@ export const experimentLogic = kea([ funnelWindowIntervalUnit, aggregation_group_type_index, funnelAggregateByHogQL, - isSecondary = false, + isSecondary, }: { metricIdx: number name?: string @@ -496,7 +496,7 @@ export const experimentLogic = kea([ try { if (isUpdate) { response = await api.update( - `api/projects/${values.currentTeamId}/experiments/${values.experimentId}`, + `api/projects/${values.currentProjectId}/experiments/${values.experimentId}`, { ...values.experiment, parameters: { @@ -524,7 +524,7 @@ export const experimentLogic = kea([ return } } else { - response = await api.create(`api/projects/${values.currentTeamId}/experiments`, { + response = await api.create(`api/projects/${values.currentProjectId}/experiments`, { ...values.experiment, parameters: { ...values.experiment?.parameters, @@ -563,6 +563,9 @@ export const experimentLogic = kea([ if (experiment?.start_date) { actions.loadExperimentResults() + if (values.featureFlags[FEATURE_FLAGS.EXPERIMENTS_MULTIPLE_METRICS]) { + actions.loadMetricResults() + } actions.loadSecondaryMetricResults() } }, @@ -712,9 +715,9 @@ export const experimentLogic = kea([ } if (experimentEntitiesChanged) { - const url = `/api/projects/${values.currentTeamId}/experiments/requires_flag_implementation?${toParams( - experiment.filters || {} - )}` + const url = `/api/projects/${ + values.currentProjectId + }/experiments/requires_flag_implementation?${toParams(experiment.filters || {})}` await breakpoint(100) try { @@ -758,7 +761,7 @@ export const experimentLogic = kea([ ...values.experiment.parameters, variant_screenshot_media_ids: variantPreviewMediaIds, } - await api.update(`api/projects/${values.currentTeamId}/experiments/${values.experimentId}`, { + await api.update(`api/projects/${values.currentProjectId}/experiments/${values.experimentId}`, { parameters: updatedParameters, }) actions.setExperiment({ @@ -775,7 +778,7 @@ export const experimentLogic = kea([ if (props.experimentId && props.experimentId !== 'new') { try { const response = await api.get( - `api/projects/${values.currentTeamId}/experiments/${props.experimentId}` + `api/projects/${values.currentProjectId}/experiments/${props.experimentId}` ) return response as Experiment } catch (error: any) { @@ -790,7 +793,7 @@ export const experimentLogic = kea([ }, updateExperiment: async (update: Partial) => { const response: Experiment = await api.update( - `api/projects/${values.currentTeamId}/experiments/${values.experimentId}`, + `api/projects/${values.currentProjectId}/experiments/${values.experimentId}`, update ) return response @@ -830,7 +833,7 @@ export const experimentLogic = kea([ const refreshParam = refresh ? 
'?refresh=true' : '' const response: ExperimentResults = await api.get( - `api/projects/${values.currentTeamId}/experiments/${values.experimentId}/results${refreshParam}` + `api/projects/${values.currentProjectId}/experiments/${values.experimentId}/results${refreshParam}` ) return { ...response.result, @@ -853,6 +856,34 @@ export const experimentLogic = kea([ }, }, ], + metricResults: [ + null as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] | null, + { + loadMetricResults: async ( + refresh?: boolean + ): Promise<(CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] | null> => { + return (await Promise.all( + values.experiment?.metrics.map(async (metric) => { + try { + // Queries are shareable, so we need to set the experiment_id for the backend to correctly associate the query with the experiment + const queryWithExperimentId = { + ...metric, + experiment_id: values.experimentId, + } + const response = await performQuery(queryWithExperimentId, undefined, refresh) + + return { + ...response, + fakeInsightId: Math.random().toString(36).substring(2, 15), + } + } catch (error) { + return {} + } + }) + )) as (CachedExperimentTrendsQueryResponse | CachedExperimentFunnelsQueryResponse)[] + }, + }, + ], secondaryMetricResults: [ null as | SecondaryMetricResults[] @@ -876,7 +907,7 @@ export const experimentLogic = kea([ experiment_id: values.experimentId, } const response: ExperimentResults = await api.create( - `api/projects/${values.currentTeamId}/query`, + `api/projects/${values.currentProjectId}/query`, { query: queryWithExperimentId, refresh: 'lazy_async' } ) @@ -898,7 +929,7 @@ export const experimentLogic = kea([ (values.experiment?.secondary_metrics || []).map(async (_, index) => { try { const secResults = await api.get( - `api/projects/${values.currentTeamId}/experiments/${values.experimentId}/secondary_results?id=${index}${refreshParam}` + `api/projects/${values.currentProjectId}/experiments/${values.experimentId}/secondary_results?id=${index}${refreshParam}` ) // :TRICKY: Maintain backwards compatibility for cached responses, remove after cache period has expired if (secResults && secResults.result && !secResults.result.hasOwnProperty('result')) { @@ -945,7 +976,7 @@ export const experimentLogic = kea([ const newFilters = transformFiltersForWinningVariant(currentFlagFilters, selectedVariantKey) await api.update( - `api/projects/${values.currentTeamId}/feature_flags/${values.experiment.feature_flag?.id}`, + `api/projects/${values.currentProjectId}/feature_flags/${values.experiment.feature_flag?.id}`, { filters: newFilters } ) diff --git a/frontend/src/scenes/experiments/experimentsLogic.ts b/frontend/src/scenes/experiments/experimentsLogic.ts index 9f4c5bb1ca706..317b353070773 100644 --- a/frontend/src/scenes/experiments/experimentsLogic.ts +++ b/frontend/src/scenes/experiments/experimentsLogic.ts @@ -6,7 +6,7 @@ import api from 'lib/api' import { FEATURE_FLAGS } from 'lib/constants' import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' import { featureFlagLogic, FeatureFlagsSet } from 'lib/logic/featureFlagLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { userLogic } from 'scenes/userLogic' import { Experiment, ExperimentsTabs, ProgressStatus } from '~/types' @@ -37,8 +37,8 @@ export const experimentsLogic = kea([ path(['scenes', 'experiments', 'experimentsLogic']), connect({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + 
['currentProjectId'], userLogic, ['user', 'hasAvailableFeature'], featureFlagLogic, @@ -76,16 +76,16 @@ export const experimentsLogic = kea([ [] as Experiment[], { loadExperiments: async () => { - const response = await api.get(`api/projects/${values.currentTeamId}/experiments?limit=1000`) + const response = await api.get(`api/projects/${values.currentProjectId}/experiments?limit=1000`) return response.results as Experiment[] }, deleteExperiment: async (id: number) => { - await api.delete(`api/projects/${values.currentTeamId}/experiments/${id}`) + await api.delete(`api/projects/${values.currentProjectId}/experiments/${id}`) lemonToast.info('Experiment removed') return values.experiments.filter((experiment) => experiment.id !== id) }, archiveExperiment: async (id: number) => { - await api.update(`api/projects/${values.currentTeamId}/experiments/${id}`, { archived: true }) + await api.update(`api/projects/${values.currentProjectId}/experiments/${id}`, { archived: true }) lemonToast.info('Experiment archived') return values.experiments.filter((experiment) => experiment.id !== id) }, diff --git a/frontend/src/scenes/experiments/utils.test.ts b/frontend/src/scenes/experiments/utils.test.ts index e3a73c89cbb4b..22d03cad8829a 100644 --- a/frontend/src/scenes/experiments/utils.test.ts +++ b/frontend/src/scenes/experiments/utils.test.ts @@ -1,5 +1,6 @@ import { EntityType, FeatureFlagFilters, InsightType } from '~/types' +import { getNiceTickValues } from './ExperimentView/DeltaViz' import { getMinimumDetectableEffect, transformFiltersForWinningVariant } from './utils' describe('utils', () => { @@ -215,3 +216,22 @@ describe('utils', () => { expect(newFilters).toEqual(expectedFilters) }) }) + +describe('getNiceTickValues', () => { + it('generates appropriate tick values for different ranges', () => { + // Small values (< 0.1) + expect(getNiceTickValues(0.08)).toEqual([-0.1, -0.08, -0.06, -0.04, -0.02, 0, 0.02, 0.04, 0.06, 0.08, 0.1]) + + // Medium small values (0.1 - 1) + expect(getNiceTickValues(0.45)).toEqual([-0.5, -0.4, -0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3, 0.4, 0.5]) + + // Values around 1 + expect(getNiceTickValues(1.2)).toEqual([-1.5, -1.0, -0.5, 0, 0.5, 1.0, 1.5]) + + // Values around 5 + expect(getNiceTickValues(4.7)).toEqual([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]) + + // Larger values + expect(getNiceTickValues(8.5)).toEqual([-10, -8, -6, -4, -2, 0, 2, 4, 6, 8, 10]) + }) +}) diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts index 9c247bf0d6c03..f4b5e9dc45a45 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts @@ -4,6 +4,7 @@ import api from 'lib/api' import { isEmptyProperty } from 'lib/components/PropertyFilters/utils' import { TaxonomicFilterGroupType, TaxonomicFilterProps } from 'lib/components/TaxonomicFilter/types' import { objectsEqual, range } from 'lib/utils' +import { projectLogic } from 'scenes/projectLogic' import { groupsModel } from '~/models/groupsModel' import { @@ -16,7 +17,6 @@ import { UserBlastRadiusType, } from '~/types' -import { teamLogic } from '../teamLogic' import type { featureFlagReleaseConditionsLogicType } from './FeatureFlagReleaseConditionsLogicType' // TODO: Type onChange errors properly @@ -33,7 +33,7 @@ export const featureFlagReleaseConditionsLogic = kea id ?? 
'unknown'), connect({ - values: [teamLogic, ['currentTeamId'], groupsModel, ['groupTypes', 'aggregationLabel']], + values: [projectLogic, ['currentProjectId'], groupsModel, ['groupTypes', 'aggregationLabel']], }), actions({ setFilters: (filters: FeatureFlagFilters) => ({ filters }), @@ -156,10 +156,13 @@ export const featureFlagReleaseConditionsLogic = kea { void deleteWithUndo({ - endpoint: `projects/${currentTeamId}/feature_flags`, + endpoint: `projects/${currentProjectId}/feature_flags`, object: { name: featureFlag.key, id: featureFlag.id }, callback: loadFeatureFlags, }) diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 875b6f56cf81a..48889df0f3d63 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -584,13 +584,16 @@ export const featureFlagLogic = kea([ try { let savedFlag: FeatureFlagType if (!updatedFlag.id) { - savedFlag = await api.create(`api/projects/${values.currentTeamId}/feature_flags`, preparedFlag) + savedFlag = await api.create( + `api/projects/${values.currentProjectId}/feature_flags`, + preparedFlag + ) if (values.roleBasedAccessEnabled && savedFlag.id) { featureFlagPermissionsLogic({ flagId: null })?.actions.addAssociatedRoles(savedFlag.id) } } else { savedFlag = await api.update( - `api/projects/${values.currentTeamId}/feature_flags/${updatedFlag.id}`, + `api/projects/${values.currentProjectId}/feature_flags/${updatedFlag.id}`, preparedFlag ) } diff --git a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts index 9ad6e03a4fb0c..dc45e6e9352f7 100644 --- a/frontend/src/scenes/feature-flags/featureFlagsLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagsLogic.ts @@ -4,12 +4,12 @@ import { loaders } from 'kea-loaders' import { actionToUrl, router, urlToAction } from 'kea-router' import api from 'lib/api' import { objectsEqual, toParams } from 'lib/utils' +import { projectLogic } from 'scenes/projectLogic' import { Scene } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { Breadcrumb, FeatureFlagType } from '~/types' -import { teamLogic } from '../teamLogic' import type { featureFlagsLogicType } from './featureFlagsLogicType' export const FLAGS_PER_PAGE = 100 @@ -60,7 +60,7 @@ export const featureFlagsLogic = kea([ props({} as FlagLogicProps), path(['scenes', 'feature-flags', 'featureFlagsLogic']), connect({ - values: [teamLogic, ['currentTeamId']], + values: [projectLogic, ['currentProjectId']], }), actions({ updateFlag: (flag: FeatureFlagType) => ({ flag }), @@ -75,7 +75,7 @@ export const featureFlagsLogic = kea([ { loadFeatureFlags: async () => { const response = await api.get( - `api/projects/${values.currentTeamId}/feature_flags/?${toParams(values.paramsFromFilters)}` + `api/projects/${values.currentProjectId}/feature_flags/?${toParams(values.paramsFromFilters)}` ) return { @@ -85,7 +85,7 @@ export const featureFlagsLogic = kea([ }, updateFeatureFlag: async ({ id, payload }: { id: number; payload: Partial }) => { const response = await api.update( - `api/projects/${values.currentTeamId}/feature_flags/${id}`, + `api/projects/${values.currentProjectId}/feature_flags/${id}`, payload ) const updatedFlags = [...values.featureFlags.results].map((flag) => diff --git a/frontend/src/scenes/groups/groupLogic.ts b/frontend/src/scenes/groups/groupLogic.ts index a5d97ef290cc1..f5535964d1e58 100644 --- 
a/frontend/src/scenes/groups/groupLogic.ts +++ b/frontend/src/scenes/groups/groupLogic.ts @@ -7,8 +7,8 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { toParams } from 'lib/utils' import { capitalizeFirstLetter } from 'lib/utils' import { groupDisplayId } from 'scenes/persons/GroupActorDisplay' +import { projectLogic } from 'scenes/projectLogic' import { Scene } from 'scenes/sceneTypes' -import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { groupsModel } from '~/models/groupsModel' @@ -50,8 +50,8 @@ export const groupLogic = kea([ path((key) => ['scenes', 'groups', 'groupLogic', key]), connect({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], groupsModel, ['groupTypes', 'aggregationLabel'], featureFlagLogic, @@ -68,7 +68,7 @@ export const groupLogic = kea([ { loadGroup: async () => { const params = { group_type_index: props.groupTypeIndex, group_key: props.groupKey } - const url = `api/projects/${values.currentTeamId}/groups/find?${toParams(params)}` + const url = `api/projects/${values.currentProjectId}/groups/find?${toParams(params)}` return await api.get(url) }, }, diff --git a/frontend/src/scenes/groups/groupsListLogic.ts b/frontend/src/scenes/groups/groupsListLogic.ts index 34a3bcb6dd80b..8e90f810dd9a7 100644 --- a/frontend/src/scenes/groups/groupsListLogic.ts +++ b/frontend/src/scenes/groups/groupsListLogic.ts @@ -2,7 +2,7 @@ import { actions, afterMount, connect, kea, key, listeners, path, props, reducer import { loaders } from 'kea-loaders' import api from 'lib/api' import { groupsAccessLogic } from 'lib/introductions/groupsAccessLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { groupsModel, Noun } from '~/models/groupsModel' import { Group } from '~/types' @@ -25,8 +25,8 @@ export const groupsListLogic = kea([ path(['groups', 'groupsListLogic']), connect({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], groupsModel, ['groupTypes', 'aggregationLabel'], groupsAccessLogic, @@ -49,7 +49,7 @@ export const groupsListLogic = kea([ } url = url || - `api/projects/${values.currentTeamId}/groups/?group_type_index=${props.groupTypeIndex}${ + `api/projects/${values.currentProjectId}/groups/?group_type_index=${props.groupTypeIndex}${ values.search ? '&search=' + encodeURIComponent(values.search) : '' }` return await api.get(url) diff --git a/frontend/src/scenes/groups/relatedGroupsLogic.ts b/frontend/src/scenes/groups/relatedGroupsLogic.ts index a5088d95ddd3e..8ad630510e11c 100644 --- a/frontend/src/scenes/groups/relatedGroupsLogic.ts +++ b/frontend/src/scenes/groups/relatedGroupsLogic.ts @@ -2,7 +2,7 @@ import { actions, connect, events, kea, key, path, props } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' import { toParams } from 'lib/utils' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { ActorType } from '~/types' @@ -17,7 +17,7 @@ export const relatedGroupsLogic = kea([ ), key((props) => `${props.groupTypeIndex ?? 
'person'}-${props.id}`), path(['scenes', 'groups', 'relatedGroupsLogic']), - connect({ values: [teamLogic, ['currentTeamId']] }), + connect({ values: [projectLogic, ['currentProjectId']] }), actions(() => ({ loadRelatedActors: true, })), @@ -26,7 +26,7 @@ export const relatedGroupsLogic = kea([ [] as ActorType[], { loadRelatedActors: async () => { - const url = `api/projects/${values.currentTeamId}/groups/related?${toParams({ + const url = `api/projects/${values.currentProjectId}/groups/related?${toParams({ group_type_index: props.groupTypeIndex, id: props.id, })}` diff --git a/frontend/src/scenes/heatmaps/heatmapsBrowserLogic.ts b/frontend/src/scenes/heatmaps/heatmapsBrowserLogic.ts index b6e0ef59d53b8..d9696985e600b 100644 --- a/frontend/src/scenes/heatmaps/heatmapsBrowserLogic.ts +++ b/frontend/src/scenes/heatmaps/heatmapsBrowserLogic.ts @@ -32,6 +32,9 @@ export interface IFrameBanner { message: string | JSX.Element } +// team id is always available on window +const teamId = window.POSTHOG_APP_CONTEXT?.current_team?.id + export const heatmapsBrowserLogic = kea([ path(['scenes', 'heatmaps', 'heatmapsBrowserLogic']), props({} as HeatmapsBrowserLogicProps), @@ -172,7 +175,7 @@ export const heatmapsBrowserLogic = kea([ ], browserUrl: [ null as string | null, - { persist: true }, + { persist: true, prefix: `${teamId}__` }, { setBrowserUrl: (_, { url }) => url, }, diff --git a/frontend/src/scenes/insights/InsightPageHeader.tsx b/frontend/src/scenes/insights/InsightPageHeader.tsx index 4c4fd79933da1..9ff53430145ad 100644 --- a/frontend/src/scenes/insights/InsightPageHeader.tsx +++ b/frontend/src/scenes/insights/InsightPageHeader.tsx @@ -31,8 +31,8 @@ import { InsightSaveButton } from 'scenes/insights/InsightSaveButton' import { insightSceneLogic } from 'scenes/insights/insightSceneLogic' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' +import { projectLogic } from 'scenes/projectLogic' import { savedInsightsLogic } from 'scenes/saved-insights/savedInsightsLogic' -import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' import { userLogic } from 'scenes/userLogic' @@ -83,7 +83,7 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In const { tags } = useValues(tagsModel) const { user } = useValues(userLogic) const { preflight } = useValues(preflightLogic) - const { currentTeamId } = useValues(teamLogic) + const { currentProjectId } = useValues(projectLogic) const { push } = useActions(router) const [addToDashboardModalOpen, setAddToDashboardModalOpenModal] = useState(false) @@ -316,7 +316,7 @@ export function InsightPageHeader({ insightLogicProps }: { insightLogicProps: In onClick={() => void deleteInsightWithUndo({ object: insight as QueryBasedInsightModel, - endpoint: `projects/${currentTeamId}/insights`, + endpoint: `projects/${currentProjectId}/insights`, callback: () => { loadInsights() push(urls.savedInsights()) diff --git a/frontend/src/scenes/insights/insightLogic.tsx b/frontend/src/scenes/insights/insightLogic.tsx index a299d639fee1d..a3c9905180538 100644 --- a/frontend/src/scenes/insights/insightLogic.tsx +++ b/frontend/src/scenes/insights/insightLogic.tsx @@ -408,9 +408,15 @@ export const insightLogic: LogicWrapper = kea = { @@ -499,6 +502,9 @@ export const languageCodeToName: Record = { za: 'Zhuang', zu: 'Zulu', + // Some browsers might use `zz` to imply an unknown locale + zz: 'Unknown', + // Some browsers use 
one-long or three-long codes so we're adding here as fallback h: 'Croatian', chr: 'Cherokee', @@ -510,7 +516,13 @@ export const languageCodeToName: Record = { // This is only used as a fallback for some languages that don't usually // come in the locale-country format (such as nl-NL usually being presented simply as nl) -// We'll fill this as we see fit based on the values seen in the wild -const languageCodeToCountryCode: Record = { - nl: 'NL', +// but that can't simply be translated to a flag by capitalizing the locale +const languageCodeToEmojiFlag: Record = { + ar: '🇪🇬', // Arabic -> Egypt, tricky, there's no good representation for the "default Arab country", the Egyptian variant is the most commonly understood + af: '🇿🇦', // Afrikaans -> South Africa + ta: '🇮🇳', // Tamil -> India + eu: '🇪🇸', // Basque -> Spain, tricky, but there's no better flag than the Spanish one, we could move to custom SVGs if we wanted to solve this + cy: '🏴󠁧󠁢󠁷󠁬󠁳󠁿', // Welsh -> Wales + ne: '🇳🇵', // Nepali -> Nepal + zz: '🇺🇳', // Unknown Language -> UN flag? } diff --git a/frontend/src/scenes/messaging/functionsTableLogic.tsx b/frontend/src/scenes/messaging/functionsTableLogic.tsx index 054b8144d73e8..035157dcb2ee0 100644 --- a/frontend/src/scenes/messaging/functionsTableLogic.tsx +++ b/frontend/src/scenes/messaging/functionsTableLogic.tsx @@ -3,7 +3,7 @@ import { actions, afterMount, connect, kea, key, path, props, reducers, selector import { loaders } from 'kea-loaders' import api from 'lib/api' import { deleteWithUndo } from 'lib/utils/deleteWithUndo' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { HogFunctionType, HogFunctionTypeType } from '~/types' @@ -23,7 +23,7 @@ export const functionsTableLogic = kea([ props({} as FunctionsTableLogicProps), key((props: FunctionsTableLogicProps) => props.type ?? 'destination'), connect({ - values: [teamLogic, ['currentTeamId']], + values: [projectLogic, ['currentProjectId']], }), actions({ deleteHogFunction: (hogFunction: HogFunctionType) => ({ hogFunction }), @@ -48,11 +48,11 @@ export const functionsTableLogic = kea([ { loadHogFunctions: async () => { // TODO: pagination? - return (await api.hogFunctions.list({ type: props.type ?? 'destination' })).results + return (await api.hogFunctions.list(undefined, props.type ?? 
'destination')).results }, deleteHogFunction: async ({ hogFunction }) => { await deleteWithUndo({ - endpoint: `projects/${teamLogic.values.currentTeamId}/hog_functions`, + endpoint: `projects/${values.currentProjectId}/hog_functions`, object: { id: hogFunction.id, name: hogFunction.name, diff --git a/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts b/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts index 2a3d23f34fc16..73d0110ea1599 100644 --- a/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts +++ b/frontend/src/scenes/persons/relatedFeatureFlagsLogic.ts @@ -4,7 +4,7 @@ import { loaders } from 'kea-loaders' import api from 'lib/api' import { toParams } from 'lib/utils' import { featureFlagsLogic } from 'scenes/feature-flags/featureFlagsLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { FeatureFlagReleaseType, FeatureFlagType } from '~/types' @@ -36,7 +36,7 @@ export interface RelatedFlagsFilters { export const relatedFeatureFlagsLogic = kea([ path(['scenes', 'persons', 'relatedFeatureFlagsLogic']), - connect({ values: [teamLogic, ['currentTeamId'], featureFlagsLogic, ['featureFlags']] }), + connect({ values: [projectLogic, ['currentProjectId'], featureFlagsLogic, ['featureFlags']] }), props( {} as { distinctId: string @@ -56,7 +56,7 @@ export const relatedFeatureFlagsLogic = kea([ { loadRelatedFeatureFlags: async () => { const response = await api.get( - `api/projects/${values.currentTeamId}/feature_flags/evaluation_reasons?${toParams({ + `api/projects/${values.currentProjectId}/feature_flags/evaluation_reasons?${toParams({ distinct_id: props.distinctId, ...(props.groups ? { groups: props.groups } : {}), })}` diff --git a/frontend/src/scenes/pipeline/FrontendApps.tsx b/frontend/src/scenes/pipeline/FrontendApps.tsx index d6fad0d6ea6e8..ed1f984facc3c 100644 --- a/frontend/src/scenes/pipeline/FrontendApps.tsx +++ b/frontend/src/scenes/pipeline/FrontendApps.tsx @@ -13,29 +13,43 @@ import { NewButton } from './NewButton' import { SiteApp } from './types' import { appColumn, nameColumn, pipelinePluginBackedNodeMenuCommonItems } from './utils' -export function FrontendApps(): JSX.Element { +export interface FrontendAppsProps { + asLegacyList?: boolean +} + +export function FrontendApps({ asLegacyList }: FrontendAppsProps): JSX.Element { const { loading, frontendApps } = useValues(frontendAppsLogic) const { toggleEnabled, loadPluginConfigs } = useActions(frontendAppsLogic) - const shouldShowEmptyState = frontendApps.length === 0 && !loading + const shouldShowEmptyState = frontendApps.length === 0 && !loading && !asLegacyList return ( <> - } - /> - } - isEmpty={shouldShowEmptyState} - /> + {!asLegacyList && ( + } + /> + )} + {!asLegacyList && ( + } + isEmpty={shouldShowEmptyState} + /> + )} {!shouldShowEmptyState && ( <> + {!loading && asLegacyList && ( + <> +

    Legacy Site apps

    +

    These site apps are using an older system and should eventually be migrated over.

    + + )} } + size={size} > New {stage} diff --git a/frontend/src/scenes/pipeline/Overview.tsx b/frontend/src/scenes/pipeline/Overview.tsx index 37be9276f686c..ba8e77a7c1ad0 100644 --- a/frontend/src/scenes/pipeline/Overview.tsx +++ b/frontend/src/scenes/pipeline/Overview.tsx @@ -9,6 +9,7 @@ import { urls } from 'scenes/urls' import { PipelineStage, PipelineTab } from '~/types' +import { DESTINATION_TYPES } from './destinations/constants' import { DestinationsTable } from './destinations/Destinations' import { TransformationsTable } from './Transformations' @@ -72,7 +73,7 @@ export function Overview(): JSX.Element { Send your data to destinations in real time or with batch exports. Only active Destinations are shown here. See all.

    - +
    diff --git a/frontend/src/scenes/pipeline/Pipeline.tsx b/frontend/src/scenes/pipeline/Pipeline.tsx index 5e689fea80e56..f5dd0a7907429 100644 --- a/frontend/src/scenes/pipeline/Pipeline.tsx +++ b/frontend/src/scenes/pipeline/Pipeline.tsx @@ -1,13 +1,16 @@ import { useValues } from 'kea' import { router } from 'kea-router' import { ActivityLog } from 'lib/components/ActivityLog/ActivityLog' +import { FEATURE_FLAGS } from 'lib/constants' import { ConcreteLemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { SceneExport } from 'scenes/sceneTypes' import { urls } from 'scenes/urls' import { ActivityScope, PipelineTab } from '~/types' import { AppsManagement } from './AppsManagement' +import { DESTINATION_TYPES, SITE_APP_TYPES } from './destinations/constants' import { Destinations } from './destinations/Destinations' import { FrontendApps } from './FrontendApps' import { ImportApps } from './ImportApps' @@ -22,13 +25,21 @@ export function Pipeline(): JSX.Element { const { canGloballyManagePlugins } = useValues(pipelineAccessLogic) const { currentTab } = useValues(pipelineLogic) const { hasEnabledImportApps } = useValues(importAppsLogic) + const { featureFlags } = useValues(featureFlagLogic) const tabs: Pick, 'key' | 'content'>[] = [ { key: PipelineTab.Overview, content: }, { key: PipelineTab.Sources, content: }, { key: PipelineTab.Transformations, content: }, - { key: PipelineTab.Destinations, content: }, - { key: PipelineTab.SiteApps, content: }, + { key: PipelineTab.Destinations, content: }, + { + key: PipelineTab.SiteApps, + content: featureFlags[FEATURE_FLAGS.SITE_APP_FUNCTIONS] ? ( + + ) : ( + + ), + }, ] // Import apps are deprecated, we only show the tab if there are some still enabled diff --git a/frontend/src/scenes/pipeline/PipelineNode.tsx b/frontend/src/scenes/pipeline/PipelineNode.tsx index 1f610cebe7fa9..bd5512498c29a 100644 --- a/frontend/src/scenes/pipeline/PipelineNode.tsx +++ b/frontend/src/scenes/pipeline/PipelineNode.tsx @@ -41,7 +41,7 @@ const paramsToProps = ({ } return { - stage: PIPELINE_TAB_TO_NODE_STAGE[stage] || null, + stage: PIPELINE_TAB_TO_NODE_STAGE[stage as PipelineTab] || null, id: numericId && !isNaN(numericId) ? numericId : id, } } @@ -97,6 +97,11 @@ export function PipelineNode(params: { stage?: string; id?: string } = {}): JSX. 
) } + if (stage === PipelineStage.SiteApp) { + delete tabToContent[PipelineNodeTab.Logs] + delete tabToContent[PipelineNodeTab.Metrics] + } + return ( <> diff --git a/frontend/src/scenes/pipeline/PipelineNodeNew.tsx b/frontend/src/scenes/pipeline/PipelineNodeNew.tsx index 5b26f115a9866..8e22d52f2642c 100644 --- a/frontend/src/scenes/pipeline/PipelineNodeNew.tsx +++ b/frontend/src/scenes/pipeline/PipelineNodeNew.tsx @@ -3,9 +3,11 @@ import { useActions, useValues } from 'kea' import { combineUrl, router } from 'kea-router' import { NotFound } from 'lib/components/NotFound' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' +import { FEATURE_FLAGS } from 'lib/constants' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonTable } from 'lib/lemon-ui/LemonTable' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { useEffect } from 'react' import { NewSourceWizardScene } from 'scenes/data-warehouse/new/NewSourceWizard' import { SceneExport } from 'scenes/sceneTypes' @@ -13,6 +15,7 @@ import { urls } from 'scenes/urls' import { AvailableFeature, PipelineStage, PluginType } from '~/types' +import { DESTINATION_TYPES, SITE_APP_TYPES } from './destinations/constants' import { NewDestinations } from './destinations/NewDestinations' import { frontendAppsLogic } from './frontendAppsLogic' import { HogFunctionConfiguration } from './hogfunctions/HogFunctionConfiguration' @@ -71,6 +74,7 @@ function convertPluginToTableEntry(plugin: PluginType): TableEntry { } export function PipelineNodeNew(params: { stage?: string; id?: string } = {}): JSX.Element { + const { featureFlags } = useValues(featureFlagLogic) const { stage, pluginId, batchExportDestination, hogFunctionId } = paramsToProps({ params }) if (!stage) { @@ -103,9 +107,13 @@ export function PipelineNodeNew(params: { stage?: string; id?: string } = {}): J if (stage === PipelineStage.Transformation) { return } else if (stage === PipelineStage.Destination) { - return + return } else if (stage === PipelineStage.SiteApp) { - return + return featureFlags[FEATURE_FLAGS.SITE_APP_FUNCTIONS] ? ( + + ) : ( + + ) } else if (stage === PipelineStage.Source) { return } diff --git a/frontend/src/scenes/pipeline/destinations/DestinationTag.tsx b/frontend/src/scenes/pipeline/destinations/DestinationTag.tsx index 328751f8a7bf5..0c65ee12f1634 100644 --- a/frontend/src/scenes/pipeline/destinations/DestinationTag.tsx +++ b/frontend/src/scenes/pipeline/destinations/DestinationTag.tsx @@ -11,6 +11,8 @@ export function DestinationTag({ status }: { status: HogFunctionTemplateStatus } return Beta case 'stable': return New // Once Hog Functions are fully released we can remove the new label + case 'client-side': + return Client-Side default: return status ? 
{capitalizeFirstLetter(status)} : null } diff --git a/frontend/src/scenes/pipeline/destinations/Destinations.tsx b/frontend/src/scenes/pipeline/destinations/Destinations.tsx index 336f15ba90762..4f7e8cc4f37f5 100644 --- a/frontend/src/scenes/pipeline/destinations/Destinations.tsx +++ b/frontend/src/scenes/pipeline/destinations/Destinations.tsx @@ -9,58 +9,110 @@ import { updatedAtColumn } from 'lib/lemon-ui/LemonTable/columnUtils' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' import { urls } from 'scenes/urls' -import { AvailableFeature, PipelineNodeTab, PipelineStage, ProductKey } from '~/types' +import { AvailableFeature, HogFunctionTypeType, PipelineNodeTab, PipelineStage, ProductKey } from '~/types' import { AppMetricSparkLine } from '../AppMetricSparkLine' +import { FrontendApps } from '../FrontendApps' import { HogFunctionIcon } from '../hogfunctions/HogFunctionIcon' import { HogFunctionStatusIndicator } from '../hogfunctions/HogFunctionStatusIndicator' +import { hogFunctionTypeToPipelineStage } from '../hogfunctions/urls' import { AppMetricSparkLineV2 } from '../metrics/AppMetricsV2Sparkline' import { NewButton } from '../NewButton' import { pipelineAccessLogic } from '../pipelineAccessLogic' -import { Destination, PipelineBackend } from '../types' +import { Destination, PipelineBackend, SiteApp } from '../types' import { pipelineNodeMenuCommonItems, RenderApp, RenderBatchExportIcon } from '../utils' import { DestinationsFilters } from './DestinationsFilters' import { destinationsFiltersLogic } from './destinationsFiltersLogic' import { pipelineDestinationsLogic } from './destinationsLogic' import { DestinationOptionsTable } from './NewDestinations' -export function Destinations(): JSX.Element { - const { destinations, loading } = useValues(pipelineDestinationsLogic({ syncFiltersWithUrl: true })) +export interface DestinationsProps { + types: HogFunctionTypeType[] +} + +export function Destinations({ types }: DestinationsProps): JSX.Element { + const { destinations, loading } = useValues(pipelineDestinationsLogic({ types })) return ( <> - } - /> - - } - isEmpty={destinations.length === 0 && !loading} + {types.includes('destination') ? ( + <> + } + /> + + } + isEmpty={destinations.length === 0 && !loading} + /> + + + ) : types.includes('site_app') ? ( + } /> - - + ) : ( + } + /> + )} + +
    -

    New destinations

    - +

    + {types.includes('destination') + ? 'New destinations' + : types.includes('site_app') + ? 'New site app' + : 'New Hog function'} +

    + + {/* Old site-apps until we migrate everyone onto the new ones */} + {types.includes('site_app') ? : null} ) } +export type DestinationsTableProps = { + types: HogFunctionTypeType[] + hideFeedback?: boolean + hideAddDestinationButton?: boolean +} -export function DestinationsTable(): JSX.Element { +export function DestinationsTable({ + hideFeedback, + hideAddDestinationButton, + types, +}: DestinationsTableProps): JSX.Element { const { canConfigurePlugins, canEnableDestination } = useValues(pipelineAccessLogic) - const { loading, filteredDestinations, destinations, hiddenDestinations } = useValues(pipelineDestinationsLogic) - const { toggleNode, deleteNode } = useActions(pipelineDestinationsLogic) - const { resetFilters } = useActions(destinationsFiltersLogic) + const { loading, filteredDestinations, destinations, hiddenDestinations } = useValues( + pipelineDestinationsLogic({ types }) + ) + const { toggleNode, deleteNode } = useActions(pipelineDestinationsLogic({ types })) + const { resetFilters } = useActions(destinationsFiltersLogic({ types })) + + const showFrequencyHistory = types.includes('destination') + const simpleName = + types.includes('destination') || types.includes('site_destination') + ? 'destination' + : types.includes('site_app') + ? 'site app' + : 'Hog function' return (
    - + - {destination.backend === PipelineBackend.HogFunction ? ( - - ) : ( - - )} - - ) - }, - }, - updatedAtColumn() as LemonTableColumn, + ...(showFrequencyHistory + ? [ + { + title: 'Frequency', + key: 'interval', + render: function RenderFrequency(_, destination) { + return 'interval' in destination ? destination.interval : null + }, + } as LemonTableColumn, + ] + : []), + ...(showFrequencyHistory + ? [ + { + title: 'Last 7 days', + render: function RenderSuccessRate(_, destination) { + return ( + + {destination.backend === PipelineBackend.HogFunction ? ( + + ) : ( + + )} + + ) + }, + } as LemonTableColumn, + ] + : []), + updatedAtColumn() as LemonTableColumn, { title: 'Status', key: 'enabled', @@ -167,23 +227,23 @@ export function DestinationsTable(): JSX.Element { items={[ { label: destination.enabled - ? 'Pause destination' - : 'Unpause destination', + ? `Pause ${simpleName}` + : `Unpause ${simpleName}`, onClick: () => toggleNode(destination, !destination.enabled), disabledReason: !canConfigurePlugins - ? 'You do not have permission to toggle destinations.' + ? `You do not have permission to toggle ${simpleName}s.` : !canEnableDestination(destination) && !destination.enabled - ? 'Data pipelines add-on is required for enabling new destinations' + ? `Data pipelines add-on is required for enabling new ${simpleName}s` : undefined, }, ...pipelineNodeMenuCommonItems(destination), { - label: 'Delete destination', + label: `Delete ${simpleName}`, status: 'danger' as const, // for typechecker happiness onClick: () => deleteNode(destination), disabledReason: canConfigurePlugins ? undefined - : 'You do not have permission to delete destinations.', + : `You do not have permission to delete ${simpleName}.`, }, ]} /> diff --git a/frontend/src/scenes/pipeline/destinations/DestinationsFilters.tsx b/frontend/src/scenes/pipeline/destinations/DestinationsFilters.tsx index b5dd7704b519e..171d0ec5d2fa7 100644 --- a/frontend/src/scenes/pipeline/destinations/DestinationsFilters.tsx +++ b/frontend/src/scenes/pipeline/destinations/DestinationsFilters.tsx @@ -1,22 +1,31 @@ import { LemonCheckbox, LemonInput, LemonSelect, Link } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { NewButton } from 'scenes/pipeline/NewButton' + +import { HogFunctionTypeType, PipelineStage } from '~/types' import { PipelineBackend } from '../types' import { destinationsFiltersLogic } from './destinationsFiltersLogic' export type DestinationsFiltersProps = { + types: HogFunctionTypeType[] hideSearch?: boolean hideShowPaused?: boolean hideKind?: boolean + hideFeedback?: boolean + hideAddDestinationButton?: boolean } export function DestinationsFilters({ + types, hideSearch, hideShowPaused, hideKind, + hideFeedback, + hideAddDestinationButton = true, }: DestinationsFiltersProps): JSX.Element | null { - const { filters } = useValues(destinationsFiltersLogic) - const { setFilters, openFeedbackDialog } = useActions(destinationsFiltersLogic) + const { filters } = useValues(destinationsFiltersLogic({ types })) + const { setFilters, openFeedbackDialog } = useActions(destinationsFiltersLogic({ types })) return (
    @@ -29,9 +38,11 @@ export function DestinationsFilters({ onChange={(e) => setFilters({ search: e })} /> )} - openFeedbackDialog()}> - Can't find what you're looking for? - + {!hideFeedback ? ( + openFeedbackDialog()}> + Can't find what you're looking for? + + ) : null}
    {typeof hideShowPaused !== 'boolean' && ( setFilters({ kind: e ?? null })} /> )} + {hideAddDestinationButton ? null : }
    ) diff --git a/frontend/src/scenes/pipeline/destinations/NewDestinations.tsx b/frontend/src/scenes/pipeline/destinations/NewDestinations.tsx index a11d06489574f..7ac6e5f9564c8 100644 --- a/frontend/src/scenes/pipeline/destinations/NewDestinations.tsx +++ b/frontend/src/scenes/pipeline/destinations/NewDestinations.tsx @@ -5,7 +5,7 @@ import { PayGateButton } from 'lib/components/PayGateMini/PayGateButton' import { PayGateMini } from 'lib/components/PayGateMini/PayGateMini' import { LemonTableLink } from 'lib/lemon-ui/LemonTable/LemonTableLink' -import { AvailableFeature, PipelineStage } from '~/types' +import { AvailableFeature, HogFunctionTypeType, PipelineStage } from '~/types' import { pipelineAccessLogic } from '../pipelineAccessLogic' import { DestinationsFilters } from './DestinationsFilters' @@ -13,20 +13,24 @@ import { destinationsFiltersLogic } from './destinationsFiltersLogic' import { DestinationTag } from './DestinationTag' import { newDestinationsLogic } from './newDestinationsLogic' -export function NewDestinations(): JSX.Element { +export interface NewDestinationsProps { + types: HogFunctionTypeType[] +} + +export function NewDestinations({ types }: NewDestinationsProps): JSX.Element { return (
    - - - + {types.includes('destination') ? : null} + +
    ) } -export function DestinationOptionsTable(): JSX.Element { - const { loading, filteredDestinations, hiddenDestinations } = useValues(newDestinationsLogic) +export function DestinationOptionsTable({ types }: NewDestinationsProps): JSX.Element { + const { loading, filteredDestinations, hiddenDestinations } = useValues(newDestinationsLogic({ types })) const { canEnableDestination } = useValues(pipelineAccessLogic) - const { resetFilters } = useActions(destinationsFiltersLogic) + const { resetFilters } = useActions(destinationsFiltersLogic({ types })) return ( <> diff --git a/frontend/src/scenes/pipeline/destinations/constants.ts b/frontend/src/scenes/pipeline/destinations/constants.ts new file mode 100644 index 0000000000000..dda2e7d0fe3d0 --- /dev/null +++ b/frontend/src/scenes/pipeline/destinations/constants.ts @@ -0,0 +1,4 @@ +import { HogFunctionTypeType } from '~/types' + +export const DESTINATION_TYPES = ['destination', 'site_destination'] satisfies HogFunctionTypeType[] +export const SITE_APP_TYPES = ['site_app'] satisfies HogFunctionTypeType[] diff --git a/frontend/src/scenes/pipeline/destinations/destinationsFiltersLogic.tsx b/frontend/src/scenes/pipeline/destinations/destinationsFiltersLogic.tsx index 77b98d9cb1a68..469b33640cfbc 100644 --- a/frontend/src/scenes/pipeline/destinations/destinationsFiltersLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/destinationsFiltersLogic.tsx @@ -1,5 +1,5 @@ import { LemonDialog, LemonInput, LemonTextArea, lemonToast } from '@posthog/lemon-ui' -import { actions, connect, kea, listeners, path, reducers } from 'kea' +import { actions, connect, kea, key, listeners, path, props, reducers } from 'kea' import { actionToUrl, router, urlToAction } from 'kea-router' import { LemonField } from 'lib/lemon-ui/LemonField' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' @@ -7,6 +7,8 @@ import { objectsEqual } from 'lib/utils' import posthog from 'posthog-js' import { userLogic } from 'scenes/userLogic' +import { HogFunctionTypeType } from '~/types' + import { PipelineBackend } from '../types' import type { destinationsFiltersLogicType } from './destinationsFiltersLogicType' @@ -17,8 +19,14 @@ export type DestinationsFilters = { showPaused?: boolean } +export interface DestinationsFiltersLogicProps { + types: HogFunctionTypeType[] +} + export const destinationsFiltersLogic = kea([ path(() => ['scenes', 'pipeline', 'destinations', 'destinationsFiltersLogic']), + props({} as DestinationsFiltersLogicProps), + key((props) => props.types.join(',') ?? 
''), connect({ values: [userLogic, ['user'], featureFlagLogic, ['featureFlags']], }), diff --git a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx index 6f87bd5668ad8..bed880c886729 100644 --- a/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/destinationsLogic.tsx @@ -1,17 +1,19 @@ import { lemonToast } from '@posthog/lemon-ui' import FuseClass from 'fuse.js' -import { actions, afterMount, connect, kea, listeners, path, selectors } from 'kea' +import { actions, afterMount, connect, kea, key, listeners, path, props, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { deleteWithUndo } from 'lib/utils/deleteWithUndo' +import { projectLogic } from 'scenes/projectLogic' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' import { BatchExportConfiguration, HogFunctionType, + HogFunctionTypeType, PipelineStage, PluginConfigTypeNew, PluginConfigWithPluginInfoNew, @@ -25,6 +27,7 @@ import { Destination, FunctionDestination, PipelineBackend, + SiteApp, WebhookDestination, } from '../types' import { captureBatchExportEvent, capturePluginEvent, loadPluginsFromUrl } from '../utils' @@ -32,28 +35,34 @@ import { destinationsFiltersLogic } from './destinationsFiltersLogic' import type { pipelineDestinationsLogicType } from './destinationsLogicType' // Helping kea-typegen navigate the exported default class for Fuse -export interface Fuse extends FuseClass {} +export interface Fuse extends FuseClass {} + +export interface PipelineDestinationsLogicProps { + types: HogFunctionTypeType[] +} export const pipelineDestinationsLogic = kea([ path(['scenes', 'pipeline', 'destinationsLogic']), - connect({ + props({} as PipelineDestinationsLogicProps), + key((props: PipelineDestinationsLogicProps) => props.types.join(',')), + connect((props: PipelineDestinationsLogicProps) => ({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], userLogic, ['user', 'hasAvailableFeature'], pipelineAccessLogic, ['canEnableDestination'], featureFlagLogic, ['featureFlags'], - destinationsFiltersLogic, + destinationsFiltersLogic(props), ['filters'], ], - }), + })), actions({ - toggleNode: (destination: Destination, enabled: boolean) => ({ destination, enabled }), + toggleNode: (destination: Destination | SiteApp, enabled: boolean) => ({ destination, enabled }), toggleNodeHogFunction: (destination: FunctionDestination, enabled: boolean) => ({ destination, enabled }), - deleteNode: (destination: Destination) => ({ destination }), + deleteNode: (destination: Destination | SiteApp) => ({ destination }), deleteNodeBatchExport: (destination: BatchExportDestination) => ({ destination }), deleteNodeHogFunction: (destination: FunctionDestination) => ({ destination }), deleteNodeWebhook: (destination: WebhookDestination) => ({ destination }), @@ -61,7 +70,7 @@ export const pipelineDestinationsLogic = kea([ updatePluginConfig: (pluginConfig: PluginConfigTypeNew) => ({ pluginConfig }), updateBatchExportConfig: (batchExportConfig: BatchExportConfiguration) => ({ batchExportConfig }), }), - loaders(({ values, actions }) => ({ + loaders(({ values, actions, props }) => ({ plugins: [ {} as Record, { @@ -76,7 +85,7 @@ export const pipelineDestinationsLogic = kea([ loadPluginConfigs: async () => 
{ const pluginConfigs: Record = {} const results = await api.loadPaginatedResults( - `api/projects/${values.currentTeamId}/pipeline_destination_configs` + `api/projects/${values.currentProjectId}/pipeline_destination_configs` ) for (const pluginConfig of results) { @@ -133,7 +142,7 @@ export const pipelineDestinationsLogic = kea([ { loadBatchExports: async () => { const results = await api.loadPaginatedResults( - `api/projects/${values.currentTeamId}/batch_exports` + `api/projects/${values.currentProjectId}/batch_exports` ) return Object.fromEntries(results.map((batchExport) => [batchExport.id, batchExport])) }, @@ -165,8 +174,11 @@ export const pipelineDestinationsLogic = kea([ [] as HogFunctionType[], { loadHogFunctions: async () => { - // TODO: Support pagination? - return (await api.hogFunctions.list({ type: 'destination' })).results + const siteDestinationsEnabled = !!values.featureFlags[FEATURE_FLAGS.SITE_DESTINATIONS] + const destinationTypes = siteDestinationsEnabled + ? props.types + : props.types.filter((type) => type !== 'site_destination') + return (await api.hogFunctions.list(undefined, destinationTypes)).results }, deleteNodeHogFunction: async ({ destination }) => { @@ -175,7 +187,7 @@ export const pipelineDestinationsLogic = kea([ } await deleteWithUndo({ - endpoint: `projects/${teamLogic.values.currentTeamId}/hog_functions`, + endpoint: `projects/${values.currentProjectId}/hog_functions`, object: { id: destination.hog_function.id, name: destination.name, @@ -221,7 +233,14 @@ export const pipelineDestinationsLogic = kea([ ], destinations: [ (s) => [s.pluginConfigs, s.plugins, s.batchExportConfigs, s.hogFunctions, s.user, s.featureFlags], - (pluginConfigs, plugins, batchExportConfigs, hogFunctions, user, featureFlags): Destination[] => { + ( + pluginConfigs, + plugins, + batchExportConfigs, + hogFunctions, + user, + featureFlags + ): (Destination | SiteApp)[] => { // Migrations are shown only in impersonation mode, for us to be able to trigger them. const httpEnabled = featureFlags[FEATURE_FLAGS.BATCH_EXPORTS_POSTHOG_HTTP] || user?.is_impersonated || user?.is_staff @@ -241,7 +260,10 @@ export const pipelineDestinationsLogic = kea([ ) .concat(rawBatchExports) const convertedDestinations = rawDestinations.map((d) => - convertToPipelineNode(d, PipelineStage.Destination) + convertToPipelineNode( + d, + 'type' in d && d.type === 'site_app' ? PipelineStage.SiteApp : PipelineStage.Destination + ) ) const enabledFirst = convertedDestinations.sort((a, b) => Number(b.enabled) - Number(a.enabled)) return enabledFirst @@ -259,7 +281,7 @@ export const pipelineDestinationsLogic = kea([ filteredDestinations: [ (s) => [s.filters, s.destinations, s.destinationsFuse], - (filters, destinations, destinationsFuse): Destination[] => { + (filters, destinations, destinationsFuse): (Destination | SiteApp)[] => { const { search, showPaused, kind } = filters return (search ?
destinationsFuse.search(search).map((x) => x.item) : destinations).filter((dest) => { @@ -276,7 +298,7 @@ export const pipelineDestinationsLogic = kea([ hiddenDestinations: [ (s) => [s.destinations, s.filteredDestinations], - (destinations, filteredDestinations): Destination[] => { + (destinations, filteredDestinations): (Destination | SiteApp)[] => { return destinations.filter((dest) => !filteredDestinations.includes(dest)) }, ], @@ -298,7 +320,7 @@ export const pipelineDestinationsLogic = kea([ deleteNode: ({ destination }) => { switch (destination.backend) { case PipelineBackend.Plugin: - actions.deleteNodeWebhook(destination) + actions.deleteNodeWebhook(destination as WebhookDestination) break case PipelineBackend.BatchExport: actions.deleteNodeBatchExport(destination) diff --git a/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx b/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx index 2aed66735b44b..e4b6bd8db6c24 100644 --- a/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx +++ b/frontend/src/scenes/pipeline/destinations/newDestinationsLogic.tsx @@ -1,5 +1,5 @@ import FuseClass from 'fuse.js' -import { actions, afterMount, connect, kea, path, selectors } from 'kea' +import { actions, afterMount, connect, kea, key, path, props, selectors } from 'kea' import { loaders } from 'kea-loaders' import { combineUrl, router } from 'kea-router' import api from 'lib/api' @@ -18,9 +18,11 @@ import { import { humanizeBatchExportName } from '../batch-exports/utils' import { HogFunctionIcon } from '../hogfunctions/HogFunctionIcon' +import { hogFunctionTypeToPipelineStage } from '../hogfunctions/urls' import { PipelineBackend } from '../types' import { RenderBatchExportIcon } from '../utils' import { destinationsFiltersLogic } from './destinationsFiltersLogic' +import { PipelineDestinationsLogicProps } from './destinationsLogic' import type { newDestinationsLogicType } from './newDestinationsLogicType' export type NewDestinationItemType = { @@ -37,18 +39,31 @@ export interface Fuse extends FuseClass {} export const newDestinationsLogic = kea([ path(() => ['scenes', 'pipeline', 'destinations', 'newDestinationsLogic']), - connect({ - values: [userLogic, ['user'], featureFlagLogic, ['featureFlags'], destinationsFiltersLogic, ['filters']], - }), + props({} as PipelineDestinationsLogicProps), + key((props) => props.types.join(',') ?? ''), + connect(({ types }: PipelineDestinationsLogicProps) => ({ + values: [ + userLogic, + ['user'], + featureFlagLogic, + ['featureFlags'], + destinationsFiltersLogic({ types }), + ['filters'], + ], + })), actions({ openFeedbackDialog: true, }), - loaders({ + loaders(({ props, values }) => ({ hogFunctionTemplates: [ {} as Record, { loadHogFunctionTemplates: async () => { - const templates = await api.hogFunctions.listTemplates() + const siteDestinationsEnabled = !!values.featureFlags[FEATURE_FLAGS.SITE_DESTINATIONS] + const destinationTypes = siteDestinationsEnabled + ?
props.types + : props.types.filter((type) => type !== 'site_destination') + const templates = await api.hogFunctions.listTemplates(destinationTypes) return templates.results.reduce((acc, template) => { acc[template.id] = template return acc @@ -56,13 +71,18 @@ export const newDestinationsLogic = kea([ }, }, ], - }), + })), selectors(() => ({ loading: [(s) => [s.hogFunctionTemplatesLoading], (hogFunctionTemplatesLoading) => hogFunctionTemplatesLoading], + types: [() => [(_, p) => p.types], (types) => types], batchExportServiceNames: [ - (s) => [s.user, s.featureFlags], - (user, featureFlags): BatchExportService['type'][] => { + (s) => [s.user, s.featureFlags, s.types], + (user, featureFlags, types): BatchExportService['type'][] => { + // Only add batch exports on the "destinations" page + if (!types.includes('destination')) { + return [] + } const httpEnabled = featureFlags[FEATURE_FLAGS.BATCH_EXPORTS_POSTHOG_HTTP] || user?.is_impersonated || user?.is_staff // HTTP is currently only used for Cloud to Cloud migrations and shouldn't be accessible to users @@ -82,7 +102,10 @@ export const newDestinationsLogic = kea([ description: hogFunction.description, backend: PipelineBackend.HogFunction as const, url: combineUrl( - urls.pipelineNodeNew(PipelineStage.Destination, `hog-${hogFunction.id}`), + urls.pipelineNodeNew( + hogFunctionTypeToPipelineStage(hogFunction.type), + `hog-${hogFunction.id}` + ), {}, hashParams ).url, diff --git a/frontend/src/scenes/pipeline/frontendAppsLogic.tsx b/frontend/src/scenes/pipeline/frontendAppsLogic.tsx index ae1ad42d61db7..7499651698154 100644 --- a/frontend/src/scenes/pipeline/frontendAppsLogic.tsx +++ b/frontend/src/scenes/pipeline/frontendAppsLogic.tsx @@ -1,7 +1,7 @@ import { actions, afterMount, connect, kea, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { userLogic } from 'scenes/userLogic' import { PipelineStage, PluginConfigTypeNew, PluginConfigWithPluginInfoNew, PluginType } from '~/types' @@ -13,7 +13,7 @@ import { capturePluginEvent, checkPermissions, loadPluginsFromUrl } from './util export const frontendAppsLogic = kea([ path(['scenes', 'pipeline', 'frontendAppsLogic']), connect({ - values: [teamLogic, ['currentTeamId'], userLogic, ['user']], + values: [projectLogic, ['currentProjectId'], userLogic, ['user']], }), actions({ loadPluginConfigs: true, @@ -33,7 +33,7 @@ export const frontendAppsLogic = kea([ { loadPluginConfigs: async () => { const res: PluginConfigTypeNew[] = await api.loadPaginatedResults( - `api/projects/${values.currentTeamId}/pipeline_frontend_apps_configs` + `api/projects/${values.currentProjectId}/pipeline_frontend_apps_configs` ) return Object.fromEntries(res.map((pluginConfig) => [pluginConfig.id, pluginConfig])) diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx index 2879e88e79e86..f837bc49fe7b3 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionConfiguration.tsx @@ -151,11 +151,14 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur return } - const showFilters = type === 'destination' || type === 'broadcast' - const showExpectedVolume = type === 'destination' - const showEnabled = type === 'destination' || type === 'email' - const canEditSource = type 
=== 'destination' || type === 'email' + const showFilters = type === 'destination' || type === 'site_destination' || type === 'broadcast' + const showExpectedVolume = type === 'destination' || type === 'site_destination' + const showStatus = type === 'destination' || type === 'email' + const showEnabled = type === 'destination' || type === 'email' || type === 'site_destination' || type === 'site_app' + const canEditSource = + type === 'destination' || type === 'email' || type === 'site_destination' || type === 'site_app' const showPersonsCount = type === 'broadcast' + const showTesting = type === 'destination' || type === 'broadcast' || type === 'email' return (
    @@ -210,7 +213,7 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur {template && }
    - {showEnabled && } + {showStatus && } {showEnabled && ( {({ value, onChange }) => ( @@ -236,7 +239,7 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur - {hogFunction?.template ? ( + {hogFunction?.template && !hogFunction.template.id.startsWith('template-blank-') ? ( {({ value, onChange }) => ( <> - - This is the underlying Hog code that will run whenever the - filters match.{' '} - See the docs for - more info - + {!type.startsWith('site_') ? ( + + This is the underlying Hog code that will run whenever the + filters match.{' '} + See the docs{' '} + for more info + + ) : null} onChange(v ?? '')} globals={globalsWithInputs} @@ -489,8 +494,13 @@ export function HogFunctionConfiguration({ templateId, id }: HogFunctionConfigur ) : null}
    )} - - {!id || id === 'new' ? : } + {showTesting ? ( + !id || id === 'new' ? ( + + ) : ( + + ) + ) : null}
    {saveButtons}
    diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx index 92c1729a080c2..31dc8190bfc63 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionInputs.tsx @@ -313,7 +313,7 @@ export function HogFunctionInputWithSchema({ schema }: HogFunctionInputWithSchem const { attributes, listeners, setNodeRef, transform, transition } = useSortable({ id: schema.key }) const { showSource, configuration } = useValues(hogFunctionConfigurationLogic) const { setConfigurationValue } = useActions(hogFunctionConfigurationLogic) - const [editing, setEditing] = useState(showSource) + const [editing, setEditing] = useState(false) const value = configuration.inputs?.[schema.key] @@ -390,7 +390,7 @@ export function HogFunctionInputWithSchema({ schema }: HogFunctionInputWithSchem {supportsTemplating && ( } noPadding className=" opacity-0 group-hover:opacity-100 p-1 transition-opacity" diff --git a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionStatusIndicator.tsx b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionStatusIndicator.tsx index 6a66d7025dbc2..81bd4b409a204 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/HogFunctionStatusIndicator.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/HogFunctionStatusIndicator.tsx @@ -64,11 +64,16 @@ export function HogFunctionStatusIndicator({ hogFunction }: HogFunctionStatusInd return null } - const { tagType, display, description } = hogFunction.status?.state - ? displayMap[hogFunction.status.state] - : hogFunction.enabled - ? DEFAULT_DISPLAY - : DISABLED_MANUALLY_DISPLAY + const { tagType, display, description } = + hogFunction.type === 'site_app' || hogFunction.type === 'site_destination' + ? hogFunction.enabled + ? displayMap[HogWatcherState.healthy] + : DISABLED_MANUALLY_DISPLAY + : hogFunction.status?.state + ? displayMap[hogFunction.status.state] + : hogFunction.enabled + ? DEFAULT_DISPLAY + : DISABLED_MANUALLY_DISPLAY return ( - Mock out async functions + Mock out HTTP requests diff --git a/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx b/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx index 681e63a6239b2..1dd5588045f54 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/filters/HogFunctionFilters.tsx @@ -74,6 +74,8 @@ export function HogFunctionFilters(): JSX.Element { ) } + const showMasking = type === 'destination' + return (
    @@ -144,117 +146,119 @@ export function HogFunctionFilters(): JSX.Element { )} - - {({ value, onChange }) => ( -
    - - onChange({ - hash: val, - ttl: value?.ttl ?? 60 * 30, - }) - } - /> - {configuration.masking?.hash ? ( - <> -
    - of - onChange({ ...value, ttl: val })} - options={[ - { - value: 5 * 60, - label: '5 minutes', - }, - { - value: 15 * 60, - label: '15 minutes', - }, - { - value: 30 * 60, - label: '30 minutes', - }, - { - value: 60 * 60, - label: '1 hour', - }, - { - value: 2 * 60 * 60, - label: '2 hours', - }, - { - value: 4 * 60 * 60, - label: '4 hours', - }, - { - value: 8 * 60 * 60, - label: '8 hours', - }, - { - value: 12 * 60 * 60, - label: '12 hours', - }, - { - value: 24 * 60 * 60, - label: '24 hours', - }, - ]} - /> -
    -
    - or until - onChange({ ...value, threshold: val })} - options={[ - { - value: null, - label: 'Not set', - }, - { - value: 1000, - label: '1000 events', - }, - { - value: 10000, - label: '10,000 events', - }, - { - value: 100000, - label: '100,000 events', - }, - { - value: 1000000, - label: '1,000,000 events', - }, - ]} - /> -
    - - ) : null} -
    - )} -
    + {showMasking ? ( + + {({ value, onChange }) => ( +
    + + onChange({ + hash: val, + ttl: value?.ttl ?? 60 * 30, + }) + } + /> + {configuration.masking?.hash ? ( + <> +
    + of + onChange({ ...value, ttl: val })} + options={[ + { + value: 5 * 60, + label: '5 minutes', + }, + { + value: 15 * 60, + label: '15 minutes', + }, + { + value: 30 * 60, + label: '30 minutes', + }, + { + value: 60 * 60, + label: '1 hour', + }, + { + value: 2 * 60 * 60, + label: '2 hours', + }, + { + value: 4 * 60 * 60, + label: '4 hours', + }, + { + value: 8 * 60 * 60, + label: '8 hours', + }, + { + value: 12 * 60 * 60, + label: '12 hours', + }, + { + value: 24 * 60 * 60, + label: '24 hours', + }, + ]} + /> +
    +
    + or until + onChange({ ...value, threshold: val })} + options={[ + { + value: null, + label: 'Not set', + }, + { + value: 1000, + label: '1000 events', + }, + { + value: 10000, + label: '10,000 events', + }, + { + value: 100000, + label: '100,000 events', + }, + { + value: 1000000, + label: '1,000,000 events', + }, + ]} + /> +
    + + ) : null} +
    + )} +
    + ) : null}
    ) } diff --git a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx index 5bd3191c430a0..229abea7e424d 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/hogFunctionConfigurationLogic.tsx @@ -12,6 +12,7 @@ import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import posthog from 'posthog-js' import { asDisplay } from 'scenes/persons/person-utils' import { hogFunctionNewUrl, hogFunctionUrl } from 'scenes/pipeline/hogfunctions/urls' +import { projectLogic } from 'scenes/projectLogic' import { teamLogic } from 'scenes/teamLogic' import { userLogic } from 'scenes/userLogic' @@ -169,7 +170,14 @@ export const hogFunctionConfigurationLogic = kea ['scenes', 'pipeline', 'hogFunctionConfigurationLogic', id]), actions({ @@ -189,9 +197,10 @@ export const hogFunctionConfigurationLogic = kea ({ error }), }), - reducers({ + reducers(({ props }) => ({ showSource: [ - false, + // Show source by default for blank templates when creating a new function + !!(!props.id && props.templateId?.startsWith('template-blank-')), { setShowSource: (_, { showSource }) => showSource, }, @@ -226,7 +235,7 @@ export const hogFunctionConfigurationLogic = kea error, }, ], - }), + })), loaders(({ actions, props, values }) => ({ template: [ null as HogFunctionTemplateType | null, @@ -525,8 +534,8 @@ export const hogFunctionConfigurationLogic = kea [s.configuration, s.currentTeam, s.groupTypes], - (configuration, currentTeam, groupTypes): HogFunctionInvocationGlobals => { + (s) => [s.configuration, s.currentProject, s.groupTypes], + (configuration, currentProject, groupTypes): HogFunctionInvocationGlobals => { const currentUrl = window.location.href.split('#')[0] const eventId = uuid() const personId = uuid() @@ -541,7 +550,7 @@ export const hogFunctionConfigurationLogic = kea([ forms(({ props, actions, values }) => ({ testInvocation: { defaults: { - mock_async_functions: true, + mock_async_functions: false, } as HogFunctionTestInvocationForm, alwaysShowErrors: true, errors: ({ globals }) => { diff --git a/frontend/src/scenes/pipeline/hogfunctions/list/hogFunctionListLogic.tsx b/frontend/src/scenes/pipeline/hogfunctions/list/hogFunctionListLogic.tsx index 6487d26d98a52..44a20fdb1900e 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/list/hogFunctionListLogic.tsx +++ b/frontend/src/scenes/pipeline/hogfunctions/list/hogFunctionListLogic.tsx @@ -8,7 +8,7 @@ import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { objectsEqual } from 'lib/utils' import { deleteWithUndo } from 'lib/utils/deleteWithUndo' import { pipelineAccessLogic } from 'scenes/pipeline/pipelineAccessLogic' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { userLogic } from 'scenes/userLogic' import { HogFunctionType, HogFunctionTypeType } from '~/types' @@ -37,8 +37,8 @@ export const hogFunctionListLogic = kea([ path((id) => ['scenes', 'pipeline', 'hogFunctionListLogic', id]), connect({ values: [ - teamLogic, - ['currentTeamId'], + projectLogic, + ['currentProjectId'], userLogic, ['user', 'hasAvailableFeature'], pipelineAccessLogic, @@ -74,16 +74,11 @@ export const hogFunctionListLogic = kea([ [] as HogFunctionType[], { loadHogFunctions: async () => { - return ( - await api.hogFunctions.list({ - filters: values.filters?.filters, - type: props.type, - }) - ).results + return (await 
api.hogFunctions.list(values.filters?.filters, props.type)).results }, deleteHogFunction: async ({ hogFunction }) => { await deleteWithUndo({ - endpoint: `projects/${teamLogic.values.currentTeamId}/hog_functions`, + endpoint: `projects/${values.currentProjectId}/hog_functions`, object: { id: hogFunction.id, name: hogFunction.name, diff --git a/frontend/src/scenes/pipeline/hogfunctions/urls.ts b/frontend/src/scenes/pipeline/hogfunctions/urls.ts index 1374d68fca347..a26ce4a331d55 100644 --- a/frontend/src/scenes/pipeline/hogfunctions/urls.ts +++ b/frontend/src/scenes/pipeline/hogfunctions/urls.ts @@ -7,10 +7,10 @@ export function hogFunctionNewUrl(type: HogFunctionTypeType, template?: string): ? urls.messagingProviderNew(template) : type === 'broadcast' ? urls.messagingBroadcastNew() - : urls.pipelineNodeNew(PipelineStage.Destination, template ? `hog-${template}` : undefined) + : urls.pipelineNodeNew(hogFunctionTypeToPipelineStage(type), template ? `hog-${template}` : undefined) } -export function hogFunctionUrl(type: HogFunctionTypeType, id?: string): string { +export function hogFunctionUrl(type: HogFunctionTypeType | PipelineStage, id?: string): string { if (type === 'email') { return id ? urls.messagingProvider(id) : urls.messagingProviders() } else if (type === 'broadcast') { @@ -18,9 +18,27 @@ export function hogFunctionUrl(type: HogFunctionTypeType, id?: string): string { } return id ? urls.pipelineNode( - PipelineStage.Destination, + hogFunctionTypeToPipelineStage(type), id.startsWith('hog-') ? id : `hog-${id}`, PipelineNodeTab.Configuration ) : urls.pipeline(PipelineTab.Destinations) } + +// Supports both hog function types and pipeline stages themselves as input +export function hogFunctionTypeToPipelineStage(type: string): PipelineStage { + switch (type) { + case 'site_destination': + return PipelineStage.Destination + case 'site-destination': + return PipelineStage.Destination + case 'destination': + return PipelineStage.Destination + case 'site_app': + return PipelineStage.SiteApp + case 'site-app': + return PipelineStage.SiteApp + default: + return PipelineStage.Destination + } +} diff --git a/frontend/src/scenes/pipeline/importAppsLogic.tsx b/frontend/src/scenes/pipeline/importAppsLogic.tsx index ef7aef707c66a..ee8c7a7f4c33e 100644 --- a/frontend/src/scenes/pipeline/importAppsLogic.tsx +++ b/frontend/src/scenes/pipeline/importAppsLogic.tsx @@ -1,7 +1,7 @@ import { actions, afterMount, connect, kea, path, selectors } from 'kea' import { loaders } from 'kea-loaders' import api from 'lib/api' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import { userLogic } from 'scenes/userLogic' import { PipelineStage, PluginConfigTypeNew, PluginConfigWithPluginInfoNew, PluginType } from '~/types' @@ -13,7 +13,7 @@ import { capturePluginEvent, checkPermissions, loadPluginsFromUrl } from './util export const importAppsLogic = kea([ path(['scenes', 'pipeline', 'importAppsLogic']), connect({ - values: [teamLogic, ['currentTeamId'], userLogic, ['user']], + values: [projectLogic, ['currentProjectId'], userLogic, ['user']], }), actions({ loadPluginConfigs: true, @@ -33,7 +33,7 @@ export const importAppsLogic = kea([ { loadPluginConfigs: async () => { const res: PluginConfigTypeNew[] = await api.loadPaginatedResults( - `api/projects/${values.currentTeamId}/pipeline_import_apps_configs` + `api/projects/${values.currentProjectId}/pipeline_import_apps_configs` ) return Object.fromEntries(res.map((pluginConfig) => [pluginConfig.id, pluginConfig])) diff 
--git a/frontend/src/scenes/pipeline/overviewLogic.tsx b/frontend/src/scenes/pipeline/overviewLogic.tsx index 2b104c3004b8e..60ce106cb9860 100644 --- a/frontend/src/scenes/pipeline/overviewLogic.tsx +++ b/frontend/src/scenes/pipeline/overviewLogic.tsx @@ -1,6 +1,7 @@ import { connect, kea, path } from 'kea' import { teamLogic } from 'scenes/teamLogic' +import { DESTINATION_TYPES } from './destinations/constants' import { pipelineDestinationsLogic } from './destinations/destinationsLogic' import type { pipelineOverviewLogicType } from './overviewLogicType' import { pipelineTransformationsLogic } from './transformationsLogic' @@ -13,13 +14,13 @@ export const pipelineOverviewLogic = kea([ ['currentTeamId'], pipelineTransformationsLogic, ['loading as transformationsLoading', 'transformations'], - pipelineDestinationsLogic, + pipelineDestinationsLogic({ types: DESTINATION_TYPES }), ['loading as destinationsLoading', 'destinations'], ], actions: [ pipelineTransformationsLogic, ['loadPlugins as loadTransformationPlugins', 'loadPluginConfigs as loadTransformationPluginConfigs'], - pipelineDestinationsLogic, + pipelineDestinationsLogic({ types: DESTINATION_TYPES }), [ 'loadPlugins as loadDestinationPlugins', 'loadPluginConfigs as loadDestinationPluginConfigs', diff --git a/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx b/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx index 5c658302553da..1d8875dbbedfa 100644 --- a/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineAccessLogic.tsx @@ -5,7 +5,7 @@ import { AvailableFeature } from '~/types' import { canConfigurePlugins, canGloballyManagePlugins } from './access' import type { pipelineAccessLogicType } from './pipelineAccessLogicType' -import { Destination, NewDestinationItemType, PipelineBackend } from './types' +import { Destination, NewDestinationItemType, PipelineBackend, SiteApp } from './types' export const pipelineAccessLogic = kea([ path(['scenes', 'pipeline', 'pipelineAccessLogic']), @@ -25,11 +25,13 @@ export const pipelineAccessLogic = kea([ canEnableDestination: [ (s) => [s.canEnableNewDestinations], - (canEnableNewDestinations): ((destination: Destination | NewDestinationItemType) => boolean) => { - return (destination: Destination | NewDestinationItemType) => { + (canEnableNewDestinations): ((destination: Destination | NewDestinationItemType | SiteApp) => boolean) => { + return (destination: Destination | NewDestinationItemType | SiteApp) => { return destination.backend === PipelineBackend.HogFunction ? ('hog_function' in destination - ? destination.hog_function.template?.status === 'free' + ? 
destination.hog_function.type === 'site_destination' || + destination.hog_function.type === 'site_app' || + destination.hog_function.template?.status === 'free' : destination.status === 'free') || canEnableNewDestinations : canEnableNewDestinations } diff --git a/frontend/src/scenes/pipeline/pipelineBatchExportConfigurationLogic.tsx b/frontend/src/scenes/pipeline/pipelineBatchExportConfigurationLogic.tsx index 7d0b3b405b43e..d2db4bc584484 100644 --- a/frontend/src/scenes/pipeline/pipelineBatchExportConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineBatchExportConfigurationLogic.tsx @@ -10,6 +10,7 @@ import { DatabaseSchemaBatchExportTable } from '~/queries/schema' import { BatchExportConfiguration, BatchExportService, PipelineNodeTab, PipelineStage } from '~/types' import { humanizeBatchExportName } from './batch-exports/utils' +import { DESTINATION_TYPES } from './destinations/constants' import { pipelineDestinationsLogic } from './destinations/destinationsLogic' import { pipelineAccessLogic } from './pipelineAccessLogic' import type { pipelineBatchExportConfigurationLogicType } from './pipelineBatchExportConfigurationLogicType' @@ -410,7 +411,9 @@ export const pipelineBatchExportConfigurationLogic = kea { if (name[0] === 'json_config_file' && value) { diff --git a/frontend/src/scenes/pipeline/pipelineNodeMetricsLogic.tsx b/frontend/src/scenes/pipeline/pipelineNodeMetricsLogic.tsx index 226083f9a9481..e24825c6ae2cc 100644 --- a/frontend/src/scenes/pipeline/pipelineNodeMetricsLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelineNodeMetricsLogic.tsx @@ -2,7 +2,7 @@ import { actions, afterMount, connect, kea, key, listeners, path, props, reducer import { loaders } from 'kea-loaders' import api from 'lib/api' import { toParams } from 'lib/utils' -import { teamLogic } from 'scenes/teamLogic' +import { projectLogic } from 'scenes/projectLogic' import type { pipelineNodeMetricsLogicType } from './pipelineNodeMetricsLogicType' @@ -52,7 +52,7 @@ export const pipelineNodeMetricsLogic = kea([ key(({ id }: PipelineNodeMetricsProps) => id), path((id) => ['scenes', 'pipeline', 'appMetricsLogic', id]), connect({ - values: [teamLogic, ['currentTeamId']], + values: [projectLogic, ['currentProjectId']], }), actions({ setDateRange: (from: string | null, to: string | null) => ({ from, to }), @@ -67,9 +67,7 @@ export const pipelineNodeMetricsLogic = kea([ { loadMetrics: async () => { const params = toParams({ date_from: values.dateRange.from, date_to: values.dateRange.to }) - return await api.get( - `api/projects/${teamLogic.values.currentTeamId}/app_metrics/${props.id}?${params}` - ) + return await api.get(`api/projects/${values.currentProjectId}/app_metrics/${props.id}?${params}`) }, }, ], @@ -79,7 +77,7 @@ export const pipelineNodeMetricsLogic = kea([ openErrorDetailsModal: async ({ errorType }) => { const params = toParams({ error_type: errorType }) const { result } = await api.get( - `api/projects/${teamLogic.values.currentTeamId}/app_metrics/${props.id}/error_details?${params}` + `api/projects/${values.currentProjectId}/app_metrics/${props.id}/error_details?${params}` ) return result }, diff --git a/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx b/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx index 265de21a1dbd4..721e447dc9f93 100644 --- a/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx +++ b/frontend/src/scenes/pipeline/pipelinePluginConfigurationLogic.tsx @@ -4,6 +4,7 @@ import { forms } from 'kea-forms' import { loaders } from 
'kea-loaders' import { beforeUnload, router } from 'kea-router' import api from 'lib/api' +import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { teamLogic } from 'scenes/teamLogic' import { urls } from 'scenes/urls' @@ -16,6 +17,7 @@ import { determineRequiredFields, getPluginConfigFormData, } from './configUtils' +import { DESTINATION_TYPES, SITE_APP_TYPES } from './destinations/constants' import { pipelineDestinationsLogic } from './destinations/destinationsLogic' import { frontendAppsLogic } from './frontendAppsLogic' import { importAppsLogic } from './importAppsLogic' @@ -169,9 +171,18 @@ export const pipelinePluginConfigurationLogic = kea([ path(['scenes', 'pipeline', 'transformationsLogic']), connect({ - values: [teamLogic, ['currentTeamId'], userLogic, ['user']], + values: [projectLogic, ['currentProjectId'], userLogic, ['user']], }), actions({ loadPluginConfigs: true, @@ -47,7 +47,7 @@ export const pipelineTransformationsLogic = kea { const res = await api.loadPaginatedResults( - `api/projects/${values.currentTeamId}/pipeline_transformation_configs` + `api/projects/${values.currentProjectId}/pipeline_transformation_configs` ) return Object.fromEntries(res.map((pluginConfig) => [pluginConfig.id, pluginConfig])) diff --git a/frontend/src/scenes/pipeline/types.ts b/frontend/src/scenes/pipeline/types.ts index 2ace2b479da5f..f3f88cdc16f95 100644 --- a/frontend/src/scenes/pipeline/types.ts +++ b/frontend/src/scenes/pipeline/types.ts @@ -76,7 +76,7 @@ export type NewDestinationItemType = { name: string description: string backend: PipelineBackend - status?: 'stable' | 'beta' | 'alpha' | 'free' | 'deprecated' + status?: 'stable' | 'beta' | 'alpha' | 'free' | 'deprecated' | 'client-side' } export type NewDestinationFilters = { @@ -84,7 +84,6 @@ export type NewDestinationFilters = { kind?: PipelineBackend } -// Legacy: Site apps export interface SiteApp extends PluginBasedNode { stage: PipelineStage.SiteApp } @@ -131,7 +130,12 @@ export function convertToPipelineNode( stage: stage as PipelineStage.Destination, backend: PipelineBackend.HogFunction, interval: 'realtime', - id: candidate.type === 'destination' ? `hog-${candidate.id}` : candidate.id, + id: + candidate.type === 'destination' || + candidate.type === 'site_destination' || + candidate.type === 'site_app' + ? `hog-${candidate.id}` + : candidate.id, name: candidate.name, description: candidate.description, enabled: candidate.enabled, diff --git a/frontend/src/scenes/products/Products.tsx b/frontend/src/scenes/products/Products.tsx index e307b4b06b6a3..b1397e51769b0 100644 --- a/frontend/src/scenes/products/Products.tsx +++ b/frontend/src/scenes/products/Products.tsx @@ -81,7 +81,7 @@ export function Products(): JSX.Element { const { selectedProducts, firstProductOnboarding } = useValues(productsLogic) return ( -
    +
    <>
    diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx index 0072e558ffcc2..05f4c5c131668 100644 --- a/frontend/src/scenes/saved-insights/SavedInsights.tsx +++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx @@ -47,6 +47,7 @@ import { deleteInsightWithUndo } from 'lib/utils/deleteWithUndo' import { SavedInsightsEmptyState } from 'scenes/insights/EmptyStates' import { useSummarizeInsight } from 'scenes/insights/summarizeInsight' import { organizationLogic } from 'scenes/organizationLogic' +import { projectLogic } from 'scenes/projectLogic' import { overlayForNewInsightMenu } from 'scenes/saved-insights/newInsightsMenu' import { SavedInsightsFilters } from 'scenes/saved-insights/SavedInsightsFilters' import { SceneExport } from 'scenes/sceneTypes' @@ -56,7 +57,6 @@ import { NodeKind } from '~/queries/schema' import { isNodeWithSource } from '~/queries/utils' import { ActivityScope, InsightType, LayoutView, QueryBasedInsightModel, SavedInsightsTabs } from '~/types' -import { teamLogic } from '../teamLogic' import { INSIGHTS_PER_PAGE, savedInsightsLogic } from './savedInsightsLogic' interface NewInsightButtonProps { @@ -296,12 +296,6 @@ export const QUERY_TYPES_METADATA: Record = { icon: IconPieChart, inMenu: true, }, - [NodeKind.WebTopClicksQuery]: { - name: 'Top Clicks', - description: 'View top clicks for a website', - icon: IconPieChart, - inMenu: true, - }, [NodeKind.WebGoalsQuery]: { name: 'Goals', description: 'View goal conversions', @@ -431,7 +425,7 @@ export function NewInsightButton({ dataAttr }: NewInsightButtonProps): JSX.Eleme function SavedInsightsGrid(): JSX.Element { const { loadInsights, renameInsight, duplicateInsight } = useActions(savedInsightsLogic) const { insights, insightsLoading, pagination } = useValues(savedInsightsLogic) - const { currentTeamId } = useValues(teamLogic) + const { currentProjectId } = useValues(projectLogic) const paginationState = usePagination(insights?.results || [], pagination) @@ -448,7 +442,7 @@ function SavedInsightsGrid(): JSX.Element { deleteWithUndo={async () => await deleteInsightWithUndo({ object: insight, - endpoint: `projects/${currentTeamId}/insights`, + endpoint: `projects/${currentProjectId}/insights`, callback: loadInsights, }) } @@ -476,7 +470,7 @@ export function SavedInsights(): JSX.Element { const { insights, count, insightsLoading, filters, sorting, pagination, alertModalId } = useValues(savedInsightsLogic) const { hasTagging } = useValues(organizationLogic) - const { currentTeamId } = useValues(teamLogic) + const { currentProjectId } = useValues(projectLogic) const summarizeInsight = useSummarizeInsight() const { tab, layoutView, page } = filters @@ -594,7 +588,7 @@ export function SavedInsights(): JSX.Element { onClick={() => void deleteInsightWithUndo({ object: insight, - endpoint: `projects/${currentTeamId}/insights`, + endpoint: `projects/${currentProjectId}/insights`, callback: loadInsights, }) } diff --git a/frontend/src/scenes/session-recordings/SessionRecordings.tsx b/frontend/src/scenes/session-recordings/SessionRecordings.tsx index b720e5eff097a..db612745ab2a0 100644 --- a/frontend/src/scenes/session-recordings/SessionRecordings.tsx +++ b/frontend/src/scenes/session-recordings/SessionRecordings.tsx @@ -24,7 +24,6 @@ import { urls } from 'scenes/urls' import { sidePanelSettingsLogic } from '~/layout/navigation-3000/sidepanel/panels/sidePanelSettingsLogic' import { AvailableFeature, NotebookNodeType, ReplayTabs } from '~/types' -import { 
SessionRecordingErrors } from './errors/SessionRecordingErrors' import { createPlaylist } from './playlist/playlistUtils' import { SessionRecordingsPlaylist } from './playlist/SessionRecordingsPlaylist' import { SavedSessionRecordingPlaylists } from './saved-playlists/SavedSessionRecordingPlaylists' @@ -196,8 +195,6 @@ function MainPanel(): JSX.Element {
    ) : tab === ReplayTabs.Playlists ? ( - ) : tab === ReplayTabs.Errors ? ( - ) : tab === ReplayTabs.Templates ? ( ) : null} diff --git a/frontend/src/scenes/session-recordings/apm/playerInspector/ItemPerformanceEvent.tsx b/frontend/src/scenes/session-recordings/apm/playerInspector/ItemPerformanceEvent.tsx index 622d3fada3bb5..dc118953ea57d 100644 --- a/frontend/src/scenes/session-recordings/apm/playerInspector/ItemPerformanceEvent.tsx +++ b/frontend/src/scenes/session-recordings/apm/playerInspector/ItemPerformanceEvent.tsx @@ -66,8 +66,8 @@ export interface ItemPerformanceEventProps { finalTimestamp: Dayjs | null } -function renderTimeBenchmark(milliseconds: number): JSX.Element { - return ( +function renderTimeBenchmark(milliseconds: number | null): JSX.Element | null { + return milliseconds === null ? null : ( = 2000, @@ -107,14 +107,20 @@ function StartedAt({ item }: { item: PerformanceEvent }): JSX.Element | null { ) : null } -function DurationDescription({ item }: { item: PerformanceEvent }): JSX.Element | null { +function durationMillisecondsFrom(item: PerformanceEvent): number | null { let duration = item.duration if (duration === undefined && item.end_time !== undefined && item.start_time !== undefined) { duration = item.end_time - item.start_time } - if (duration === undefined) { + return duration ?? null +} + +function DurationDescription({ item }: { item: PerformanceEvent }): JSX.Element | null { + const duration = durationMillisecondsFrom(item) + if (duration === null) { return null } + return ( <> took {humanFriendlyMilliseconds(duration)} @@ -153,7 +159,7 @@ export function ItemPerformanceEvent({ item, finalTimestamp }: ItemPerformanceEv const sizeInfo = itemSizeInfo(item) const startTime = item.start_time || item.fetch_start || 0 - const duration = item.duration || item.end_time === undefined ? 0 : item.end_time - startTime + const duration = durationMillisecondsFrom(item) const callerOrigin = isURL(item.current_url) ? new URL(item.current_url).origin : undefined const eventName = item.name || '(empty string)' @@ -185,7 +191,7 @@ export function ItemPerformanceEvent({ item, finalTimestamp }: ItemPerformanceEv // eslint-disable-next-line react/forbid-dom-props style={{ left: `${(startTime / contextLengthMs) * 100}%`, - width: `${Math.max((duration / contextLengthMs) * 100, 0.5)}%`, + width: `${Math.max(((duration ?? 0) / contextLengthMs) * 100, 0.5)}%`, }} /> {item.entry_type === 'navigation' ? 
( @@ -285,6 +291,7 @@ export function ItemPerformanceEventDetail({ item }: ItemPerformanceEventProps): setActiveTab(newKey)} tabs={[ diff --git a/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx b/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx deleted file mode 100644 index 75c96333ac580..0000000000000 --- a/frontend/src/scenes/session-recordings/errors/SessionRecordingErrors.tsx +++ /dev/null @@ -1,181 +0,0 @@ -import { IconFeatures } from '@posthog/icons' -import { LemonButton, LemonTable, LemonTabs } from '@posthog/lemon-ui' -import { captureException } from '@sentry/react' -import { useActions, useValues } from 'kea' -import { JSONViewer } from 'lib/components/JSONViewer' -import { Sparkline } from 'lib/components/Sparkline' -import { useState } from 'react' -import { urls } from 'scenes/urls' - -import { sessionPlayerModalLogic } from '../player/modal/sessionPlayerModalLogic' -import { sessionRecordingErrorsLogic } from './sessionRecordingErrorsLogic' - -const MAX_TITLE_LENGTH = 75 - -export function SessionRecordingErrors(): JSX.Element { - const { openSessionPlayer } = useActions(sessionPlayerModalLogic) - const { errors, errorsLoading } = useValues(sessionRecordingErrorsLogic) - const { loadErrorClusters, createPlaylist } = useActions(sessionRecordingErrorsLogic) - - if (!errors && !errorsLoading) { - return ( - } onClick={() => loadErrorClusters()}> - Automagically find errors - - ) - } - - return ( - <> - { - const displayTitle = parseTitle(cluster.sample) - return ( -
    - {displayTitle} -
    - ) - }, - width: '50%', - }, - { - title: '', - render: (_, cluster) => { - return ( - - ) - }, - }, - { - title: 'Occurrences', - dataIndex: 'occurrences', - sorter: (a, b) => a.occurrences - b.occurrences, - }, - { - title: 'Sessions', - dataIndex: 'unique_sessions', - sorter: (a, b) => a.unique_sessions - b.unique_sessions, - }, - { - title: 'Viewed', - tooltip: "How many of these you've already viewed", - dataIndex: 'viewed', - render: function Render(_, cluster) { - return `${((cluster.viewed / cluster.unique_sessions) * 100).toFixed(0)}%` - }, - sorter: (a, b) => a.viewed / a.unique_sessions - b.viewed / b.unique_sessions, - }, - { - title: 'Actions', - render: function Render(_, cluster) { - return ( -
    - { - e.preventDefault() - openSessionPlayer({ id: cluster.session_ids[0] }) - }} - className="whitespace-nowrap" - type="primary" - > - Watch example - - { - createPlaylist( - `Examples of '${parseTitle(cluster.sample)}'`, - cluster.session_ids - ) - }} - className="whitespace-nowrap" - type="secondary" - tooltip="Create a playlist of recordings containing this issue" - > - Create playlist - -
    - ) - }, - }, - ]} - loading={errorsLoading} - dataSource={errors || []} - expandable={{ - expandedRowRender: (cluster) => , - }} - /> - - ) -} - -const ExpandedError = ({ error }: { error: string }): JSX.Element => { - const hasJson = isJSON(error) - const [activeTab, setActiveTab] = useState(hasJson ? 'json' : 'raw') - - return hasJson ? ( -
    - , - }, - { key: 'raw', label: 'Raw', content: {error} }, - ]} - /> -
    - ) : ( -
    -

    Example error

    -
    {error}
    -
    - ) -} - -function isJSON(str: string): boolean { - try { - JSON.parse(str) - return true - } catch { - return false - } -} - -function parseTitle(error: string): string { - let input - try { - const parsedError = JSON.parse(error) - input = parsedError.error || error - } catch { - input = error - } - - if (!input) { - return error - } - - try { - // TRICKY - after json parsing we might not have a string, - // since the JSON parser will helpfully convert to other types too e.g. have seen objects here - if (typeof input !== 'string') { - input = JSON.stringify(input) - } - - return input.split('\n')[0].trim().substring(0, MAX_TITLE_LENGTH) || error - } catch (e) { - captureException(e, { extra: { error }, tags: { feature: 'replay/error-clustering' } }) - return error - } -} diff --git a/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts b/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts deleted file mode 100644 index 49de62c7bf5c4..0000000000000 --- a/frontend/src/scenes/session-recordings/errors/sessionRecordingErrorsLogic.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { actions, afterMount, kea, listeners, path } from 'kea' -import { loaders } from 'kea-loaders' -import { router } from 'kea-router' -import api from 'lib/api' -import { urls } from 'scenes/urls' - -import { ErrorClusterResponse } from '~/types' - -import { createPlaylist } from '../playlist/playlistUtils' -import type { sessionRecordingErrorsLogicType } from './sessionRecordingErrorsLogicType' - -export const sessionRecordingErrorsLogic = kea([ - path(['scenes', 'session-recordings', 'detail', 'sessionRecordingErrorsLogic']), - actions({ - createPlaylist: (name: string, sessionIds: string[]) => ({ name, sessionIds }), - }), - loaders(() => ({ - errors: [ - null as ErrorClusterResponse, - { - loadErrorClusters: async (refresh: boolean = true) => { - const response = await api.recordings.errorClusters(refresh) - return response - }, - }, - ], - })), - listeners(() => ({ - createPlaylist: async ({ name, sessionIds }) => { - const playlist = await createPlaylist({ name: name }) - - if (playlist) { - const samples = sessionIds.slice(0, 10) - await Promise.all( - samples.map((sessionId) => api.recordings.addRecordingToPlaylist(playlist.short_id, sessionId)) - ) - router.actions.push(urls.replayPlaylist(playlist.short_id)) - } - }, - })), - afterMount(({ actions }) => { - actions.loadErrorClusters(false) - }), -]) diff --git a/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFilters.tsx b/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFilters.tsx index 6dfa6d007949f..989b726851070 100644 --- a/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFilters.tsx +++ b/frontend/src/scenes/session-recordings/filters/RecordingsUniversalFilters.tsx @@ -11,6 +11,7 @@ import { TestAccountFilter } from 'scenes/insights/filters/TestAccountFilter' import { actionsModel } from '~/models/actionsModel' import { cohortsModel } from '~/models/cohortsModel' import { AndOrFilterSelect } from '~/queries/nodes/InsightViz/PropertyGroupFilters/AndOrFilterSelect' +import { NodeKind } from '~/queries/schema' import { RecordingUniversalFilters, UniversalFiltersGroup } from '~/types' import { DurationFilter } from './DurationFilter' @@ -19,16 +20,32 @@ export const RecordingsUniversalFilters = ({ filters, setFilters, className, + allowReplayHogQLFilters = false, }: { filters: RecordingUniversalFilters setFilters: (filters: Partial) => void className?: string 
+ allowReplayFlagsFilters?: boolean + allowReplayHogQLFilters?: boolean }): JSX.Element => { useMountedLogic(cohortsModel) useMountedLogic(actionsModel) const durationFilter = filters.duration[0] + const taxonomicGroupTypes = [ + TaxonomicFilterGroupType.Replay, + TaxonomicFilterGroupType.Events, + TaxonomicFilterGroupType.Actions, + TaxonomicFilterGroupType.Cohorts, + TaxonomicFilterGroupType.PersonProperties, + TaxonomicFilterGroupType.SessionProperties, + ] + + if (allowReplayHogQLFilters) { + taxonomicGroupTypes.push(TaxonomicFilterGroupType.HogQLExpression) + } + return (
    @@ -102,14 +119,7 @@ export const RecordingsUniversalFilters = ({ setFilters({ filter_group: filterGroup })} > @@ -144,6 +154,7 @@ const RecordingsUniversalFilterGroup = (): JSX.Element => { onRemove={() => removeGroupValue(index)} onChange={(value) => replaceGroupValue(index, value)} initiallyOpen={allowInitiallyOpen} + metadataSource={{ kind: NodeKind.RecordingsQuery }} /> ) })} diff --git a/frontend/src/scenes/session-recordings/player/PlayerFrameOverlay.tsx b/frontend/src/scenes/session-recordings/player/PlayerFrameOverlay.tsx index f869661481789..6337aafc995af 100644 --- a/frontend/src/scenes/session-recordings/player/PlayerFrameOverlay.tsx +++ b/frontend/src/scenes/session-recordings/player/PlayerFrameOverlay.tsx @@ -3,7 +3,6 @@ import './PlayerFrameOverlay.scss' import { IconPlay } from '@posthog/icons' import clsx from 'clsx' import { useActions, useValues } from 'kea' -import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' import { IconErrorOutline, IconSync } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { useState } from 'react' @@ -13,7 +12,6 @@ import { getCurrentExporterData } from '~/exporter/exporterViewLogic' import { SessionPlayerState } from '~/types' import { PlayerUpNext } from './PlayerUpNext' -import { SimilarRecordings } from './SimilarRecordings' const PlayerFrameOverlayContent = (): JSX.Element | null => { const { currentPlayerState, endReached } = useValues(sessionRecordingPlayerLogic) @@ -84,7 +82,6 @@ const PlayerFrameOverlayContent = (): JSX.Element | null => { export function PlayerFrameOverlay(): JSX.Element { const { playlistLogic } = useValues(sessionRecordingPlayerLogic) const { togglePlayPause } = useActions(sessionRecordingPlayerLogic) - const hasSimilarRecordings = useFeatureFlag('REPLAY_SIMILAR_RECORDINGS') const [interrupted, setInterrupted] = useState(false) @@ -96,7 +93,6 @@ export function PlayerFrameOverlay(): JSX.Element { onMouseOut={() => setInterrupted(false)} > - {hasSimilarRecordings && } {playlistLogic ? ( { const { logicProps } = useValues(sessionRecordingPlayerLogic) const { exportRecordingToFile, deleteRecording, setIsFullScreen } = useActions(sessionRecordingPlayerLogic) - const { fetchSimilarRecordings } = useActions(sessionRecordingDataLogic(logicProps)) const hasMobileExportFlag = useFeatureFlag('SESSION_REPLAY_EXPORT_MOBILE_DATA') const hasMobileExport = window.IMPERSONATED_SESSION || hasMobileExportFlag - const hasSimilarRecordings = useFeatureFlag('REPLAY_SIMILAR_RECORDINGS') const onDelete = (): void => { setIsFullScreen(false) @@ -208,12 +196,6 @@ const MenuActions = (): JSX.Element => { 'DEBUG ONLY - Export untransformed recording to a file. 
This can be loaded later into PostHog for playback.', icon: , }, - hasSimilarRecordings && { - label: 'Find similar recordings', - onClick: fetchSimilarRecordings, - icon: , - tooltip: 'DEBUG ONLY - Find similar recordings based on distance calculations via embeddings.', - }, logicProps.playerKey !== 'modal' && { label: 'Delete recording', status: 'danger', diff --git a/frontend/src/scenes/session-recordings/player/SimilarRecordings.tsx b/frontend/src/scenes/session-recordings/player/SimilarRecordings.tsx deleted file mode 100644 index 00d2bc58b5b3d..0000000000000 --- a/frontend/src/scenes/session-recordings/player/SimilarRecordings.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import { LemonButton, Spinner } from '@posthog/lemon-ui' -import { useValues } from 'kea' -import { urls } from 'scenes/urls' - -import { sessionRecordingDataLogic } from './sessionRecordingDataLogic' -import { sessionRecordingPlayerLogic } from './sessionRecordingPlayerLogic' - -export function SimilarRecordings(): JSX.Element | null { - const { logicProps } = useValues(sessionRecordingPlayerLogic) - const { similarRecordings, similarRecordingsLoading } = useValues(sessionRecordingDataLogic(logicProps)) - - if (!similarRecordings && !similarRecordingsLoading) { - return null - } - - return ( -
    - {similarRecordingsLoading ? ( - - ) : !!similarRecordings && similarRecordings?.length > 0 ? ( -
    - Watch similar recordings - {similarRecordings?.map(([id, similarity]) => ( - - {similarity} - - ))} -
    - ) : ( - No similar recordings found - )} -
    - ) -} diff --git a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx index 6cd133ea5d92d..d5236311b6a47 100644 --- a/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx +++ b/frontend/src/scenes/session-recordings/player/inspector/components/ItemEvent.tsx @@ -1,23 +1,24 @@ import './ImagePreview.scss' -import { LemonButton, LemonDivider, Tooltip } from '@posthog/lemon-ui' +import { LemonButton, LemonDivider, LemonTabs } from '@posthog/lemon-ui' import { useValues } from 'kea' import { ErrorDisplay } from 'lib/components/Errors/ErrorDisplay' +import { HTMLElementsDisplay } from 'lib/components/HTMLElementsDisplay/HTMLElementsDisplay' import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { TitledSnack } from 'lib/components/TitledSnack' import { IconOpenInNew } from 'lib/lemon-ui/icons' import { Spinner } from 'lib/lemon-ui/Spinner' -import { POSTHOG_EVENT_PROMOTED_PROPERTIES } from 'lib/taxonomy' +import { CORE_FILTER_DEFINITIONS_BY_GROUP, POSTHOG_EVENT_PROMOTED_PROPERTIES } from 'lib/taxonomy' import { autoCaptureEventToDescription, capitalizeFirstLetter, isString } from 'lib/utils' +import { AutocaptureImageTab, AutocapturePreviewImage, autocaptureToImage } from 'lib/utils/event-property-utls' +import { useState } from 'react' import { insightUrlForEvent } from 'scenes/insights/utils' import { eventPropertyFilteringLogic } from 'scenes/session-recordings/player/inspector/components/eventPropertyFilteringLogic' -import { DEFAULT_INSPECTOR_ROW_HEIGHT } from 'scenes/session-recordings/player/inspector/PlayerInspectorList' - -import { ElementType } from '~/types' import { InspectorListItemEvent } from '../playerInspectorLogic' import { SimpleKeyValueList } from './SimpleKeyValueList' + export interface ItemEventProps { item: InspectorListItemEvent } @@ -54,53 +55,6 @@ function SummarizeWebVitals({ properties }: { properties: Record }) ) } -function autocaptureToImage( - elements: ElementType[] -): null | { src: string | undefined; width: string | undefined; height: string | undefined } { - const find = elements.find((el) => el.tag_name === 'img') - const image = { - src: find?.attributes?.attr__src, - width: find?.attributes?.attr__width, - height: find?.attributes?.attr__height, - } - return image.src ? image : null -} - -function AutocaptureImage({ item }: ItemEventProps): JSX.Element | null { - const img = autocaptureToImage(item.data.elements) - if (img) { - return ( - - {/* Transparent grid background */} -
    - - {/* Image preview */} - Autocapture image src -
    - } - > - Autocapture image src -
    - ) - } - - return null -} - export function ItemEvent({ item }: ItemEventProps): JSX.Element { const subValue = item.data.event === '$pageview' ? ( @@ -110,7 +64,7 @@ export function ItemEvent({ item }: ItemEventProps): JSX.Element { ) : item.data.event === '$web_vitals' ? ( ) : item.data.elements.length ? ( - + ) : null return ( @@ -138,11 +92,26 @@ export function ItemEvent({ item }: ItemEventProps): JSX.Element { } export function ItemEventDetail({ item }: ItemEventProps): JSX.Element { + const [activeTab, setActiveTab] = useState<'properties' | 'flags' | 'image' | 'elements' | 'raw'>('properties') + const insightUrl = insightUrlForEvent(item.data) const { filterProperties } = useValues(eventPropertyFilteringLogic) const promotedKeys = POSTHOG_EVENT_PROMOTED_PROPERTIES[item.data.event] + const properties = {} + const featureFlagProperties = {} + + for (const key of Object.keys(item.data.properties)) { + if (!CORE_FILTER_DEFINITIONS_BY_GROUP.events[key] || !CORE_FILTER_DEFINITIONS_BY_GROUP.events[key].system) { + if (key.startsWith('$feature') || key === '$active_feature_flags') { + featureFlagProperties[key] = item.data.properties[key] + } else { + properties[key] = item.data.properties[key] + } + } + } + return (
    @@ -168,7 +137,59 @@ export function ItemEventDetail({ item }: ItemEventProps): JSX.Element { item.data.event === '$exception' ? ( ) : ( - + setActiveTab(newKey)} + tabs={[ + { + key: 'properties', + label: 'Properties', + content: ( + + ), + }, + { + key: 'flags', + label: 'Flags', + content: ( + + ), + }, + item.data.elements && item.data.elements.length > 0 + ? { + key: 'elements', + label: 'Elements', + content: ( + + ), + } + : null, + autocaptureToImage(item.data.elements) + ? { + key: 'image', + label: 'Image', + content: , + } + : null, + { + key: 'raw', + label: 'Raw', + content: ( +
    +                                            {JSON.stringify(item.data.properties, null, 2)}
    +                                        
    + ), + }, + ]} + /> ) ) : (
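For reference, the Properties/Flags split introduced in ItemEventDetail above boils down to a small partitioning loop. Below is a minimal, self-contained sketch of that logic; the `isSystemProperty` helper is a stand-in assumption for the real `CORE_FILTER_DEFINITIONS_BY_GROUP` lookup, and the sample event payload is made up.

```typescript
// Sketch of the property partitioning behind the new Properties/Flags tabs.
// `isSystemProperty` approximates the CORE_FILTER_DEFINITIONS_BY_GROUP.events[key].system check.
const isSystemProperty = (key: string): boolean => ['$lib', '$lib_version'].includes(key)

function splitEventProperties(all: Record<string, unknown>): {
    properties: Record<string, unknown>
    featureFlagProperties: Record<string, unknown>
} {
    const properties: Record<string, unknown> = {}
    const featureFlagProperties: Record<string, unknown> = {}
    for (const key of Object.keys(all)) {
        if (isSystemProperty(key)) {
            continue // system properties are hidden from both tabs
        }
        if (key.startsWith('$feature') || key === '$active_feature_flags') {
            featureFlagProperties[key] = all[key] // rendered in the "Flags" tab
        } else {
            properties[key] = all[key] // rendered in the "Properties" tab
        }
    }
    return { properties, featureFlagProperties }
}

// Example (made-up payload): flag keys go to Flags, the rest to Properties, $lib is dropped.
console.log(
    splitEventProperties({
        '$feature/beta-ui': true,
        $active_feature_flags: ['beta-ui'],
        $browser: 'Firefox',
        $lib: 'web',
    })
)
```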
    diff --git a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts index 67871116cb224..10118ce5defdc 100644 --- a/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts +++ b/frontend/src/scenes/session-recordings/player/sessionRecordingDataLogic.ts @@ -727,14 +727,6 @@ export const sessionRecordingDataLogic = kea([ }, }, ], - similarRecordings: [ - null as [string, number][] | null, - { - fetchSimilarRecordings: async () => { - return await api.recordings.similarRecordings(props.sessionRecordingId) - }, - }, - ], })), listeners(({ values, actions, cache, props }) => ({ loadSnapshots: () => { diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx index cb0864f30131a..b3cf899123303 100644 --- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx +++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylist.tsx @@ -45,6 +45,7 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr const { featureFlags } = useValues(featureFlagLogic) const isTestingSaved = featureFlags[FEATURE_FLAGS.SAVED_NOT_PINNED] === 'test' + const allowReplayHogQLFilters = !!featureFlags[FEATURE_FLAGS.REPLAY_HOGQL_FILTERS] const pinnedDescription = isTestingSaved ? 'Saved' : 'Pinned' @@ -92,7 +93,12 @@ export function SessionRecordingsPlaylist(props: SessionRecordingPlaylistLogicPr
    {!notebookNode && ( - + )} { - const params: RecordingsQuery = { + // as_query is a temporary parameter as a flag + // to let the backend know not to convert the query to a legacy filter when processing + const params: RecordingsQuery & { as_query?: boolean } = { ...convertUniversalFiltersToRecordingsQuery(values.filters), person_uuid: props.personUUID ?? '', limit: RECORDINGS_LIMIT, @@ -347,6 +352,10 @@ export const sessionRecordingsPlaylistLogic = kea [(_, props) => props], (props): SessionRecordingPlaylistLogicProps => props], + listAPIAsQuery: [ + (s) => [s.featureFlags], + (featureFlags) => { + return !!featureFlags[FEATURE_FLAGS.REPLAY_LIST_RECORDINGS_AS_QUERY] + }, + ], + matchingEventsMatchType: [ (s) => [s.filters], (filters): MatchingEventsMatchType => { diff --git a/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts b/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts index c2dce12e7f9e0..5f1bee532fdaa 100644 --- a/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts +++ b/frontend/src/scenes/session-recordings/sessionReplaySceneLogic.ts @@ -69,11 +69,8 @@ export const sessionReplaySceneLogic = kea([ tabs: [ (s) => [s.featureFlags], (featureFlags) => { - const hasErrorClustering = !!featureFlags[FEATURE_FLAGS.REPLAY_ERROR_CLUSTERING] const hasTemplates = !!featureFlags[FEATURE_FLAGS.REPLAY_TEMPLATES] - return Object.values(ReplayTabs).filter((tab) => - tab == ReplayTabs.Errors ? hasErrorClustering : tab == ReplayTabs.Templates ? hasTemplates : true - ) + return Object.values(ReplayTabs).filter((tab) => (tab == ReplayTabs.Templates ? hasTemplates : true)) }, ], breadcrumbs: [ diff --git a/frontend/src/scenes/settings/SettingsMap.tsx b/frontend/src/scenes/settings/SettingsMap.tsx index 3e4946ea1f8ac..488502b6e164d 100644 --- a/frontend/src/scenes/settings/SettingsMap.tsx +++ b/frontend/src/scenes/settings/SettingsMap.tsx @@ -136,7 +136,6 @@ export const SETTINGS_MAP: SettingSection[] = [ id: 'dead-clicks-autocapture', title: 'Dead clicks autocapture', component: , - flag: 'DEAD_CLICKS_AUTOCAPTURE', }, ], }, diff --git a/frontend/src/scenes/settings/environment/PathCleaningFiltersConfig.tsx b/frontend/src/scenes/settings/environment/PathCleaningFiltersConfig.tsx index 9ef7c81c18fca..ca5410ac0b6fc 100644 --- a/frontend/src/scenes/settings/environment/PathCleaningFiltersConfig.tsx +++ b/frontend/src/scenes/settings/environment/PathCleaningFiltersConfig.tsx @@ -18,7 +18,15 @@ export function PathCleaningFiltersConfig(): JSX.Element | null { } if (!hasAdvancedPaths) { - return

    Advanced path cleaning is a premium feature.

    + return ( +

    + Advanced path cleaning is a premium feature. Check{' '} + + our path cleaning rules documentation + {' '} + to learn more about it. +

    + ) } return ( @@ -32,6 +40,13 @@ export function PathCleaningFiltersConfig(): JSX.Element | null { path.

    +

    + You can check{' '} + + our path cleaning rules documentation + {' '} + to learn more about it. +

    Each rule is composed of an alias and a regex pattern. Any pattern in a URL or event name that matches the regex will be replaced with the alias. Rules are applied in the order that they're listed. diff --git a/frontend/src/scenes/settings/environment/TeamDangerZone.tsx b/frontend/src/scenes/settings/environment/TeamDangerZone.tsx index e4fe2a60f1fcd..339991fe1df0b 100644 --- a/frontend/src/scenes/settings/environment/TeamDangerZone.tsx +++ b/frontend/src/scenes/settings/environment/TeamDangerZone.tsx @@ -82,6 +82,12 @@ export function TeamDangerZone(): JSX.Element { return } + // We don't yet allow deleting individual environments, as we still use `team` fields with `on_delete=CASCADE` + // on many models that conceptually are project-level (such as insights or feature flags). That `on_delete=CASCADE` + // means currently deleting an environment would also delete resources a user wouldn't expect to disappear. + // TODO: Remove once point 15 ("Denormalize models") of https://github.com/PostHog/posthog/issues/13418#issuecomment-2180883524 is resolved + return Deletion of individual environments is coming soon. + return ( <>
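Picking up the path cleaning description above (each rule is an alias plus a regex, rules are applied in the order they're listed, and every match is replaced with the alias), here is a minimal sketch of how such rules could be applied. The rule shape and helper are illustrative assumptions, not PostHog's actual implementation.

```typescript
// Illustrative sketch of ordered path cleaning rules: alias + regex, applied in order,
// with every regex match replaced by the alias. Not PostHog's real implementation.
interface PathCleaningRule {
    alias: string
    regex: string
}

function cleanPath(path: string, rules: PathCleaningRule[]): string {
    return rules.reduce((current, rule) => current.replace(new RegExp(rule.regex, 'g'), rule.alias), path)
}

const rules: PathCleaningRule[] = [
    { alias: '/user/:id', regex: '/user/\\d+' },
    { alias: '/order/:id', regex: '/order/[0-9a-f-]{36}' },
]

console.log(cleanPath('/user/42/orders', rules)) // -> '/user/:id/orders'
console.log(cleanPath('/order/123e4567-e89b-12d3-a456-426614174000', rules)) // -> '/order/:id'
```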
    diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx index 277744efcfcb7..f064b83899bde 100644 --- a/frontend/src/scenes/surveys/SurveyView.tsx +++ b/frontend/src/scenes/surveys/SurveyView.tsx @@ -633,6 +633,7 @@ function SurveyNPSResults({ survey }: { survey: Survey }): JSX.Element { }, }, }} + readOnly={true} /> ) diff --git a/frontend/src/scenes/surveys/surveyViewViz.tsx b/frontend/src/scenes/surveys/surveyViewViz.tsx index 8f71b703919a8..8e19c575fef10 100644 --- a/frontend/src/scenes/surveys/surveyViewViz.tsx +++ b/frontend/src/scenes/surveys/surveyViewViz.tsx @@ -1,3 +1,4 @@ +import { offset } from '@floating-ui/react' import { IconInfo, IconSparkles, @@ -5,8 +6,9 @@ import { IconThumbsDownFilled, IconThumbsUp, IconThumbsUpFilled, + IconX, } from '@posthog/icons' -import { LemonButton, LemonTable, Spinner } from '@posthog/lemon-ui' +import { LemonButton, LemonTable, Popover, Spinner } from '@posthog/lemon-ui' import { BindLogic, useActions, useValues } from 'kea' import { FlaggedFeature } from 'lib/components/FlaggedFeature' import { FEATURE_FLAGS } from 'lib/constants' @@ -21,6 +23,7 @@ import { insightLogic } from 'scenes/insights/insightLogic' import { LineGraph } from 'scenes/insights/views/LineGraph/LineGraph' import { PieChart } from 'scenes/insights/views/LineGraph/PieChart' import { PersonDisplay } from 'scenes/persons/PersonDisplay' +import { surveyDataProcessingLogic } from 'scenes/surveys/suveyDataProcessingLogic' import { GraphType } from '~/types' import { InsightLogicProps, SurveyQuestionType } from '~/types' @@ -634,29 +637,76 @@ export function OpenTextViz({ } function ResponseSummariesButton({ questionIndex }: { questionIndex: number | undefined }): JSX.Element { + const [popOverClosed, setPopOverClosed] = useState(false) + const { summarize } = useActions(surveyLogic) const { responseSummary, responseSummaryLoading } = useValues(surveyLogic) - + const { surveyDataProcessingAccepted, surveyDataProcessingRefused } = useValues(surveyDataProcessingLogic) + const { acceptSurveyDataProcessing, refuseSurveyDataProcessing } = useActions(surveyDataProcessingLogic) + + const summarizeButton = ( + summarize({ questionIndex })} + disabledReason={ + surveyDataProcessingRefused + ? 'OpenAI processing refused' + : responseSummaryLoading + ? 'Let me think...' + : responseSummary + ? 'Already summarized' + : undefined + } + icon={} + > + {responseSummaryLoading ? ( + <> + Let me think... + + + ) : ( + <>Summarize responses + )} + + ) return ( - summarize({ questionIndex })} - disabledReason={ - responseSummaryLoading ? 'Let me think...' : responseSummary ? 'already summarized' : undefined - } - icon={} - > - {responseSummaryLoading ? ( - <> - Let me think... - - - ) : ( - <>Summarize responses - )} - + {surveyDataProcessingAccepted ? ( + summarizeButton + ) : ( + +
    + } onClick={() => setPopOverClosed(true)} /> +
    +
    +

+ Uses OpenAI services to analyze your survey responses. +
    + This can include personal data of your users, +
    + if they include it in their responses. +
    + Your data won't be used for training models. +

    +
    + acceptSurveyDataProcessing()}> + Got it, I accept OpenAI processing survey data + + refuseSurveyDataProcessing()}> + No thanks, I don't want OpenAI processing survey data + +
    + } + middleware={[offset(-12)]} + showArrow + visible={!popOverClosed && !surveyDataProcessingAccepted && !surveyDataProcessingRefused} + > + {summarizeButton} + + )} ) } @@ -686,7 +736,7 @@ function ResponseSummaryFeedback({ surveyId }: { surveyId: string }): JSX.Elemen return // Already rated } setRating(newRating) - posthog.capture('chat rating', { + posthog.capture('survey_resonse_rated', { survey_id: surveyId, answer_rating: rating, }) diff --git a/frontend/src/scenes/surveys/suveyDataProcessingLogic.ts b/frontend/src/scenes/surveys/suveyDataProcessingLogic.ts new file mode 100644 index 0000000000000..ceaa2dde22efd --- /dev/null +++ b/frontend/src/scenes/surveys/suveyDataProcessingLogic.ts @@ -0,0 +1,38 @@ +import { actions, kea, listeners, path, reducers } from 'kea' +import posthog from 'posthog-js' + +import type { surveyDataProcessingLogicType } from './suveyDataProcessingLogicType' + +export const surveyDataProcessingLogic = kea([ + path(['scenes', 'surveys', 'suveyDataProcessingLogic']), + actions({ + acceptSurveyDataProcessing: true, + refuseSurveyDataProcessing: true, + }), + reducers({ + surveyDataProcessingAccepted: [ + false, + { persist: true }, + { + acceptSurveyDataProcessing: () => true, + refuseSurveyDataProcessing: () => false, + }, + ], + surveyDataProcessingRefused: [ + false, + { persist: true }, + { + acceptSurveyDataProcessing: () => false, + refuseSurveyDataProcessing: () => true, + }, + ], + }), + listeners({ + acceptSurveyDataProcessing: () => { + posthog.capture('survey_data_processing_accepted') + }, + refuseSurveyDataProcessing: () => { + posthog.capture('survey_data_processing_refused') + }, + }), +]) diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx index 2dbcebb20949a..f1e4cd05bc99f 100644 --- a/frontend/src/scenes/web-analytics/WebDashboard.tsx +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -129,8 +129,13 @@ const QueryTileItem = ({ tile }: { tile: QueryTile }): JSX.Element => { layout.className )} > - {title &&

    {title}

    } - {docs && } + {title && ( +

    + {title} + {docs && } +

    + )} +
    -

    +

    {activeTab?.title} {activeTab?.docs && ( @@ -275,12 +280,12 @@ export const WebTabs = ({ } export interface LearnMorePopoverProps { - docsURL: PostHogComDocsURL + url?: PostHogComDocsURL title: string description: string | JSX.Element } -export const LearnMorePopover = ({ docsURL, title, description }: LearnMorePopoverProps): JSX.Element => { +export const LearnMorePopover = ({ url, title, description }: LearnMorePopoverProps): JSX.Element => { const [isOpen, setIsOpen] = useState(false) return ( @@ -295,25 +300,27 @@ export const LearnMorePopover = ({ docsURL, title, description }: LearnMorePopov targetBlank type="tertiary" onClick={() => setIsOpen(false)} - size="xsmall" + size="small" icon={} />

    {description}
    -
    - setIsOpen(false)} - targetBlank={true} - sideIcon={} - > - Learn more - -
    + {url && ( +
    + setIsOpen(false)} + targetBlank={true} + sideIcon={} + > + Learn more + +
    + )}
    } > - setIsOpen(!isOpen)} size="small" icon={} /> + setIsOpen(!isOpen)} size="small" icon={} className="ml-1 mb-1" /> ) } diff --git a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx index e42d6c2de89cb..63ef78e423bce 100644 --- a/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx +++ b/frontend/src/scenes/web-analytics/tiles/WebAnalyticsTile.tsx @@ -1,11 +1,14 @@ -import { IconGear } from '@posthog/icons' +import { IconGear, IconTrending } from '@posthog/icons' +import { Link, Tooltip } from '@posthog/lemon-ui' +import clsx from 'clsx' import { useActions, useValues } from 'kea' +import { getColorVar } from 'lib/colors' import { IntervalFilterStandalone } from 'lib/components/IntervalFilter' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' -import { IconOpenInNew } from 'lib/lemon-ui/icons' +import { IconOpenInNew, IconTrendingDown, IconTrendingFlat } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonSwitch } from 'lib/lemon-ui/LemonSwitch' -import { UnexpectedNeverError } from 'lib/utils' +import { percentage, UnexpectedNeverError } from 'lib/utils' import { useCallback, useMemo } from 'react' import { NewActionButton } from 'scenes/actions/NewActionButton' import { countryCodeToFlag, countryCodeToName } from 'scenes/insights/views/WorldMap' @@ -37,15 +40,72 @@ const toUtcOffsetFormat = (value: number): string => { return `UTC${sign}${integerPart}${formattedMinutes}` } -const PercentageCell: QueryContextColumnComponent = ({ value }) => { - if (typeof value === 'number') { - return {`${(value * 100).toFixed(1)}%`} - } - return null -} +type VariationCellProps = { isPercentage?: boolean; reverseColors?: boolean } +const VariationCell = ( + { isPercentage, reverseColors }: VariationCellProps = { isPercentage: false, reverseColors: false } +): QueryContextColumnComponent => { + const formatNumber = (value: number): string => + isPercentage ? `${(value * 100).toFixed(1)}%` : value.toLocaleString() + + return function Cell({ value }) { + if (!value) { + return null + } + + if (!Array.isArray(value)) { + return {String(value)} + } + + const [current, previous] = value as [number, number] + const pctChangeFromPrevious = + previous === 0 && current === 0 // Special case, render as flatline + ? 0 + : current === null + ? null + : previous === null || previous === 0 + ? Infinity + : current / previous - 1 + + const trend = + pctChangeFromPrevious === null + ? null + : pctChangeFromPrevious === 0 + ? { Icon: IconTrendingFlat, color: getColorVar('muted') } + : pctChangeFromPrevious > 0 + ? { + Icon: IconTrending, + color: reverseColors ? getColorVar('danger') : getColorVar('success'), + } + : { + Icon: IconTrendingDown, + color: reverseColors ? getColorVar('success') : getColorVar('danger'), + } + + // If current === previous, say "increased by 0%" + const tooltip = + pctChangeFromPrevious !== null + ? `${current >= previous ? 'Increased' : 'Decreased'} by ${percentage( + Math.abs(pctChangeFromPrevious), + 0 + )} since last period (from ${formatNumber(previous)} to ${formatNumber(current)})` + : null -const NumericCell: QueryContextColumnComponent = ({ value }) => { - return {typeof value === 'number' ? value.toLocaleString() : String(value)} + return ( +
    + + + {formatNumber(current)}  + {trend && ( + // eslint-disable-next-line react/forbid-dom-props + + + + )} + + +
    + ) + } } const BreakdownValueTitle: QueryContextColumnTitleComponent = (props) => { @@ -227,48 +287,48 @@ export const webAnalyticsDataTableQueryContext: QueryContext = { render: BreakdownValueCell, }, bounce_rate: { - title: 'Bounce Rate', - render: PercentageCell, + title: Bounce Rate, + render: VariationCell({ isPercentage: true, reverseColors: true }), align: 'right', }, views: { - title: 'Views', - render: NumericCell, + title: Views, + render: VariationCell(), align: 'right', }, clicks: { - title: 'Clicks', - render: NumericCell, + title: Clicks, + render: VariationCell(), align: 'right', }, visitors: { - title: 'Visitors', - render: NumericCell, + title: Visitors, + render: VariationCell(), align: 'right', }, average_scroll_percentage: { - title: 'Average Scroll', - render: PercentageCell, + title: Average Scroll, + render: VariationCell({ isPercentage: true }), align: 'right', }, scroll_gt80_percentage: { - title: 'Deep Scroll Rate', - render: PercentageCell, + title: Deep Scroll Rate, + render: VariationCell({ isPercentage: true }), align: 'right', }, total_conversions: { - title: 'Total Conversions', - render: NumericCell, + title: Total Conversions, + render: VariationCell(), align: 'right', }, conversion_rate: { - title: 'Conversion Rate', - render: PercentageCell, + title: Conversion Rate, + render: VariationCell({ isPercentage: true }), align: 'right', }, converting_users: { - title: 'Converting Users', - render: NumericCell, + title: Converting Users, + render: VariationCell(), align: 'right', }, action_name: { @@ -463,7 +523,20 @@ export const WebStatsTableTile = ({ - Enable path cleaning + + Check{' '} + + our path cleaning rules documentation + {' '} + to learn more about path cleaning + + } + interactive + > + Enable path cleaning + } type="tertiary" diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx index ce12ec43126c4..d5d93400466d3 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.tsx @@ -87,10 +87,11 @@ const loadPriorityMap: Record = { interface BaseTile { tileId: TileId layout: WebTileLayout + docs?: Docs } export interface Docs { - docsUrl: PostHogComDocsURL + url?: PostHogComDocsURL title: string description: string | JSX.Element } @@ -103,7 +104,6 @@ export interface QueryTile extends BaseTile { insightProps: InsightLogicProps canOpenModal: boolean canOpenInsight?: boolean - docs?: Docs } export interface TabsTileTab { @@ -216,6 +216,9 @@ const getDashboardItemId = (section: TileId, tab: string | undefined, isModal?: // pretend to be a new-AdHoc to get the correct behaviour elsewhere return `new-AdHoc.web-analytics.${section}.${tab || 'default'}.${isModal ? 
'modal' : 'default'}` } + +const teamId = window.POSTHOG_APP_CONTEXT?.current_team?.id +const persistConfig = { persist: true, prefix: `${teamId}__` } export const webAnalyticsLogic = kea([ path(['scenes', 'webAnalytics', 'webAnalyticsSceneLogic']), connect(() => ({ @@ -280,7 +283,7 @@ export const webAnalyticsLogic = kea([ reducers({ webAnalyticsFilters: [ initialWebAnalyticsFilter, - { persist: true }, + persistConfig, { setWebAnalyticsFilters: (_, { webAnalyticsFilters }) => webAnalyticsFilters, togglePropertyFilter: (oldPropertyFilters, { key, value, type }): WebAnalyticsPropertyFilters => { @@ -352,7 +355,7 @@ export const webAnalyticsLogic = kea([ ], _graphsTab: [ null as string | null, - { persist: true }, + persistConfig, { setGraphsTab: (_, { tab }) => tab, togglePropertyFilter: (oldTab, { tabChange }) => tabChange?.graphsTab || oldTab, @@ -360,7 +363,7 @@ export const webAnalyticsLogic = kea([ ], _sourceTab: [ null as string | null, - { persist: true }, + persistConfig, { setSourceTab: (_, { tab }) => tab, togglePropertyFilter: (oldTab, { tabChange }) => tabChange?.sourceTab || oldTab, @@ -368,7 +371,7 @@ export const webAnalyticsLogic = kea([ ], _deviceTab: [ null as string | null, - { persist: true }, + persistConfig, { setDeviceTab: (_, { tab }) => tab, togglePropertyFilter: (oldTab, { tabChange }) => tabChange?.deviceTab || oldTab, @@ -376,7 +379,7 @@ export const webAnalyticsLogic = kea([ ], _pathTab: [ null as string | null, - { persist: true }, + persistConfig, { setPathTab: (_, { tab }) => tab, togglePropertyFilter: (oldTab, { tabChange }) => tabChange?.pathTab || oldTab, @@ -384,7 +387,7 @@ export const webAnalyticsLogic = kea([ ], _geographyTab: [ null as string | null, - { persist: true }, + persistConfig, { setGeographyTab: (_, { tab }) => tab, togglePropertyFilter: (oldTab, { tabChange }) => tabChange?.geographyTab || oldTab, @@ -392,7 +395,7 @@ export const webAnalyticsLogic = kea([ ], isPathCleaningEnabled: [ null as boolean | null, - { persist: true }, + persistConfig, { setIsPathCleaningEnabled: (_, { isPathCleaningEnabled }) => isPathCleaningEnabled, }, @@ -413,7 +416,7 @@ export const webAnalyticsLogic = kea([ dateTo: initialDateTo, interval: initialInterval, }, - { persist: true }, + persistConfig, { setDates: (_, { dateTo, dateFrom }) => ({ dateTo, @@ -443,21 +446,21 @@ export const webAnalyticsLogic = kea([ ], shouldFilterTestAccounts: [ false as boolean, - { persist: true }, + persistConfig, { setShouldFilterTestAccounts: (_, { shouldFilterTestAccounts }) => shouldFilterTestAccounts, }, ], shouldStripQueryParams: [ false as boolean, - { persist: true }, + persistConfig, { setShouldStripQueryParams: (_, { shouldStripQueryParams }) => shouldStripQueryParams, }, ], conversionGoal: [ null as WebAnalyticsConversionGoal | null, - { persist: true }, + persistConfig, { setConversionGoal: (_, { conversionGoal }) => conversionGoal, }, @@ -755,6 +758,27 @@ export const webAnalyticsLogic = kea([ }, { showPathCleaningControls: true, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#paths', + title: 'Paths', + description: ( +
    +

+ In this view you can see all of the paths that were + accessed in your application, regardless of when during the + lifetime of a user session they were accessed.

    +

+ The{' '} + + bounce rate + {' '} + indicates the percentage of users who left your page immediately + after visiting, without any other event being captured.

    +
    + ), + }, } ), createTableTab( @@ -770,6 +794,16 @@ export const webAnalyticsLogic = kea([ }, { showPathCleaningControls: true, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#paths', + title: 'Entry Path', + description: ( +
+ Entry paths are the paths a user session started on, i.e. the first + path they saw when they opened your website. +
    + ), + }, } ), createTableTab( @@ -785,6 +819,17 @@ export const webAnalyticsLogic = kea([ }, { showPathCleaningControls: true, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#paths', + title: 'End Path', + description: ( +
+ End paths are the last path a user visited before their session + ended, i.e. the last path they saw before leaving your + website, closing the browser, or turning their computer off. +
    + ), + }, } ), featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_LAST_CLICK] @@ -809,6 +854,15 @@ export const webAnalyticsLogic = kea([ }, insightProps: createInsightProps(TileId.PATHS, PathTab.END_PATH), canOpenModal: true, + docs: { + title: 'Outbound Clicks', + description: ( +
+ You'll be able to see when someone leaves your website by + clicking an outbound link (to a separate domain). +
    + ), + }, } : null, ] as (TabsTileTab | undefined)[] @@ -833,7 +887,7 @@ export const webAnalyticsLogic = kea([ {}, { docs: { - docsUrl: 'https://posthog.com/docs/data/channel-type', + url: 'https://posthog.com/docs/data/channel-type', title: 'Channels', description: (
    @@ -841,6 +895,21 @@ export const webAnalyticsLogic = kea([ Channels are the different sources that bring traffic to your website, e.g. Paid Search, Organic Social, Direct, etc.

    + {featureFlags[FEATURE_FLAGS.CUSTOM_CHANNEL_TYPE_RULES] && ( +

    + You can also{' '} + + create custom channel types + + , allowing you to further categorize your channels. +

    + )} +

    Something unexpected? Try the{' '} @@ -857,49 +926,136 @@ export const webAnalyticsLogic = kea([ SourceTab.REFERRING_DOMAIN, 'Referrers', 'Referring domain', - WebStatsBreakdown.InitialReferringDomain + WebStatsBreakdown.InitialReferringDomain, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#referrers-channels-utms', + title: 'Referrers', + description: 'Understand where your users are coming from', + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_SOURCE, 'UTM sources', 'UTM source', - WebStatsBreakdown.InitialUTMSource + WebStatsBreakdown.InitialUTMSource, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM source', + description: ( + <> + Understand where your users are coming from - filtered down by their{' '} + utm_source parameter + + ), + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_MEDIUM, 'UTM medium', 'UTM medium', - WebStatsBreakdown.InitialUTMMedium + WebStatsBreakdown.InitialUTMMedium, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM medium', + description: ( + <> + Understand where your users are coming from - filtered down by their{' '} + utm_medium parameter + + ), + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_CAMPAIGN, 'UTM campaigns', 'UTM campaign', - WebStatsBreakdown.InitialUTMCampaign + WebStatsBreakdown.InitialUTMCampaign, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM campaign', + description: ( + <> + Understand where your users are coming from - filtered down by their{' '} + utm_campaign parameter + + ), + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_CONTENT, 'UTM content', 'UTM content', - WebStatsBreakdown.InitialUTMContent + WebStatsBreakdown.InitialUTMContent, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM content', + description: ( + <> + Understand where your users are coming from - filtered down by their{' '} + utm_content parameter + + ), + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_TERM, 'UTM terms', 'UTM term', - WebStatsBreakdown.InitialUTMTerm + WebStatsBreakdown.InitialUTMTerm, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM term', + description: ( + <> + Understand where your users are coming from - filtered down by their{' '} + utm_term parameter + + ), + }, + } ), createTableTab( TileId.SOURCES, SourceTab.UTM_SOURCE_MEDIUM_CAMPAIGN, 'Source / Medium / Campaign', 'UTM s/m/c', - WebStatsBreakdown.InitialUTMSourceMediumCampaign + WebStatsBreakdown.InitialUTMSourceMediumCampaign, + {}, + { + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#utms', + title: 'UTM parameters', + description: ( + <> + Understand where your users are coming from - filtered down by a tuple + of their utm_source, utm_medium, and{' '} + utm_campaign parameters + + ), + }, + } ), ], }, @@ -1045,6 +1201,28 @@ export const webAnalyticsLogic = kea([ insightProps: createInsightProps(TileId.RETENTION), canOpenInsight: false, canOpenModal: true, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#retention', + title: 'Retention', + description: ( + <> +

    +

    + Retention creates a cohort of unique users who performed any event for the + first time in the last week. It then tracks the percentage of users who + return to perform any event in the following weeks. +

    +

+ You want these numbers to be as high as possible, suggesting that + people who come to your page keep coming back to it - and keep performing + actions. Also, the further down the table the higher the numbers should + be (or at least as high), which would indicate that you're either increasing + or keeping your retention at the same level. +

    +
    + + ), + }, }, featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_CONVERSION_GOALS] ? { @@ -1071,6 +1249,25 @@ export const webAnalyticsLogic = kea([ insightProps: createInsightProps(TileId.GOALS), canOpenInsight: false, canOpenModal: false, + docs: { + url: 'https://posthog.com/docs/web-analytics/dashboard#goals', + title: 'Goals', + description: ( + <> +
    +

    + Goals shows your pinned or most recently created actions and the + number of conversions they've had. You can set a custom event or + action as a{' '} + + conversion goal + {' '} + at the top of the dashboard for more specific metrics. +

    +
    + + ), + }, } : null, featureFlags[FEATURE_FLAGS.WEB_ANALYTICS_REPLAY] @@ -1080,6 +1277,12 @@ export const webAnalyticsLogic = kea([ layout: { colSpanClassName: 'md:col-span-1', }, + docs: { + url: 'https://posthog.com/docs/session-replay', + title: 'Session Replay', + description: + 'Play back sessions to diagnose UI issues, improve support, and get context for nuanced user behavior.', + }, } : null, featureFlags[FEATURE_FLAGS.ERROR_TRACKING] @@ -1090,7 +1293,7 @@ export const webAnalyticsLogic = kea([ colSpanClassName: 'md:col-span-1', }, query: errorTrackingQuery({ - order: 'users', + orderBy: 'users', dateRange: dateRange, filterTestAccounts: filterTestAccounts, filterGroup: replayFilters.filter_group, @@ -1098,6 +1301,25 @@ export const webAnalyticsLogic = kea([ columns: ['error', 'users', 'occurrences'], limit: 4, }), + docs: { + url: 'https://posthog.com/docs/error-tracking', + title: 'Error Tracking', + description: ( + <> +
    +

    + Error tracking allows you to track, investigate, and resolve + exceptions your customers face. +

    +

+ Errors are captured as $exception events, which means that + you can create insights, filter recordings and trigger surveys based + on them exactly the same way you can for any other type of event. +

    +
    + + ), + }, } : null, ] diff --git a/frontend/src/stories/How to build a form.stories.mdx b/frontend/src/stories/How to build a form.stories.mdx index b8e20ca871966..4a700497af549 100644 --- a/frontend/src/stories/How to build a form.stories.mdx +++ b/frontend/src/stories/How to build a form.stories.mdx @@ -24,7 +24,7 @@ export const featureFlagLogic = kea> featureFlag: [ { ...NEW_FLAG } as FeatureFlagModel, { - loadFeatureFlag: () => api.get(`api/projects/${values.currentTeamId}/feature_flags/${props.id}`), + loadFeatureFlag: () => api.get(`api/projects/${values.currentProjectId}/feature_flags/${props.id}`), }, ], }), @@ -44,8 +44,8 @@ export const featureFlagLogic = kea> // eslint-disable-next-line @typescript-eslint/no-unused-vars const { created_at, id, ...flag } = featureFlag const newFeatureFlag = updatedFlag.id - ? await api.update(`api/projects/${values.currentTeamId}/feature_flags/${updatedFlag.id}`, flag) - : await api.create(`api/projects/${values.currentTeamId}/feature_flags`, flag) + ? await api.update(`api/projects/${values.currentProjectId}/feature_flags/${updatedFlag.id}`, flag) + : await api.create(`api/projects/${values.currentProjectId}/feature_flags`, flag) breakpoint() actions.setFeatureFlagValues(newFeatureFlag) lemonToast.success('Feature flag saved') diff --git a/frontend/src/types.ts b/frontend/src/types.ts index 42031efa47e5a..8ad57c8964ec9 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -688,7 +688,6 @@ export enum ReplayTabs { Templates = 'templates', Home = 'home', Playlists = 'playlists', - Errors = 'errors', } export enum ExperimentsTabs { @@ -4053,6 +4052,10 @@ export interface DataWarehouseViewLink { field_name?: string created_by?: UserBasicType | null created_at?: string | null + configuration?: { + experiments_optimized?: boolean + experiments_timestamp_key?: string | null + } } export enum DataWarehouseSettingsTab { @@ -4588,7 +4591,16 @@ export interface HogFunctionFiltersType { bytecode_error?: string } -export type HogFunctionTypeType = 'destination' | 'email' | 'sms' | 'push' | 'activity' | 'alert' | 'broadcast' +export type HogFunctionTypeType = + | 'destination' + | 'site_destination' + | 'site_app' + | 'email' + | 'sms' + | 'push' + | 'activity' + | 'alert' + | 'broadcast' export type HogFunctionType = { id: string @@ -4610,7 +4622,7 @@ export type HogFunctionType = { status?: HogFunctionStatus } -export type HogFunctionTemplateStatus = 'alpha' | 'beta' | 'stable' | 'free' | 'deprecated' +export type HogFunctionTemplateStatus = 'alpha' | 'beta' | 'stable' | 'free' | 'deprecated' | 'client-side' export type HogFunctionSubTemplateIdType = 'early_access_feature_enrollment' | 'survey_response' export type HogFunctionConfigurationType = Omit< diff --git a/mypy-baseline.txt b/mypy-baseline.txt index 9654cdb251bef..23e23e7e67ebe 100644 --- a/mypy-baseline.txt +++ b/mypy-baseline.txt @@ -1,4 +1,3 @@ -posthog/tasks/exports/ordered_csv_renderer.py:0: error: No return value expected [return-value] posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment] posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable] posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right 
operand type: "Literal['interval']") [comparison-overlap] @@ -121,7 +120,6 @@ posthog/hogql/database/schema/groups.py:0: note: Consider using "Mapping" instea posthog/hogql/database/schema/persons.py:0: error: Incompatible types in assignment (expression has type "Organization | None", variable has type "Organization") [assignment] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] posthog/models/team/team.py:0: error: Statement is unreachable [unreachable] -posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "type[User]", base class "BaseManager" defined the type as "type[_T]") [assignment] posthog/models/user.py:0: error: Cannot override class variable (previously declared on base class "AbstractBaseUser") with instance variable [misc] posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment] @@ -155,6 +153,7 @@ posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict ent posthog/hogql_queries/legacy_compatibility/filter_to_query.py:0: error: Dict entry 0 has incompatible type "str": "StickinessFilter"; expected "str": "TrendsFilter" [dict-item] posthog/session_recordings/models/session_recording.py:0: error: Argument "distinct_id" to "MissingPerson" has incompatible type "str | None"; expected "str" [arg-type] posthog/session_recordings/models/session_recording.py:0: error: Incompatible type for lookup 'persondistinctid__team_id': (got "Team", expected "str | int") [misc] +posthog/models/hog_functions/hog_function.py:0: error: Argument 1 to "get" of "dict" has incompatible type "str | None"; expected "str" [arg-type] ee/tasks/subscriptions/slack_subscriptions.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_key" [union-attr] posthog/warehouse/models/table.py:0: error: Item "None" of "DataWarehouseCredential | None" has no attribute "access_secret" [union-attr] @@ -348,12 +347,12 @@ posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" h posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectSetQuery" cannot exist: would have incompatible method signatures [unreachable] posthog/api/organization.py:0: error: Incompatible return value type (got "int | None", expected "Level | None") [return-value] posthog/queries/person_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] -posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/queries/event_query/event_query.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc] posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/hogql_query_runner.py:0: error: Statement is unreachable [unreachable] posthog/hogql_queries/hogql_query_runner.py:0: error: Incompatible return value type (got "SelectQuery | SelectSetQuery", expected "SelectQuery") [return-value] posthog/hogql_queries/events_query_runner.py:0: error: Statement is unreachable 
[unreachable] +posthog/api/action.py:0: error: Argument 1 to has incompatible type "*tuple[str, ...]"; expected "type[BaseRenderer]" [arg-type] posthog/queries/breakdown_props.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type] posthog/queries/breakdown_props.py:0: error: Incompatible type for lookup 'pk': (got "str | None", expected "str | int") [misc] posthog/queries/breakdown_props.py:0: error: Incompatible return value type (got "str | None", expected "str") [return-value] @@ -411,7 +410,6 @@ posthog/api/survey.py:0: error: Incompatible types in assignment (expression has posthog/api/survey.py:0: error: Item "list[_ErrorFullDetails]" of "_FullDetailDict | list[_ErrorFullDetails] | dict[str, _ErrorFullDetails]" has no attribute "get" [union-attr] posthog/api/survey.py:0: error: Item "object" of "object | Any" has no attribute "__iter__" (not iterable) [union-attr] posthog/hogql_queries/web_analytics/web_overview.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] -posthog/hogql_queries/web_analytics/top_clicks.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/user.py:0: error: Module has no attribute "utc" [attr-defined] posthog/api/user.py:0: error: Module has no attribute "utc" [attr-defined] posthog/api/user.py:0: error: "User" has no attribute "social_auth" [attr-defined] @@ -575,8 +573,6 @@ posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does no posthog/api/test/test_signup.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_preflight.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] posthog/api/test/test_preflight.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "datetime" [attr-defined] -posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] -posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] posthog/api/test/test_person.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] posthog/api/test/test_organization_domain.py:0: error: Item "None" of "datetime | None" has no attribute "strftime" [union-attr] posthog/api/signup.py:0: error: Argument 1 to "create_user" of "UserManager" has incompatible type "str | None"; expected "str" [arg-type] @@ -723,7 +719,8 @@ posthog/helpers/full_text_search.py:0: error: Incompatible return value type (go posthog/helpers/full_text_search.py:0: error: Argument 1 to "reduce" has incompatible type "Callable[[SearchVector, SearchVector], CombinedExpression]"; expected "Callable[[SearchVector, SearchVector], SearchVector]" [arg-type] posthog/helpers/full_text_search.py:0: error: Incompatible return value type (got "CombinedExpression", expected "SearchVector") [return-value] posthog/async_migrations/test/test_runner.py:0: error: Item "None" of "datetime | None" has no attribute "day" [union-attr] -posthog/api/test/test_survey.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "active" [union-attr] 
+posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] +posthog/api/test/test_personal_api_keys.py:0: error: Item "None" of "str | None" has no attribute "startswith" [union-attr] posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] posthog/api/test/test_insight.py:0: error: Argument "data" to "get" of "APIClient" has incompatible type "dict[str, object]"; expected "Mapping[str, str | bytes | int | Iterable[str | bytes | int]] | Iterable[tuple[str, str | bytes | int | Iterable[str | bytes | int]]] | None" [arg-type] @@ -805,6 +802,7 @@ posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "Ap posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type] posthog/queries/app_metrics/historical_exports.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type] +posthog/api/test/test_survey.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "active" [union-attr] posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "toolbar_mode" [union-attr] posthog/api/test/test_decide.py:0: error: Item "None" of "User | None" has no attribute "save" [union-attr] posthog/api/test/test_authentication.py:0: error: Module has no attribute "utc" [attr-defined] diff --git a/package.json b/package.json index 1c72ce1483e2c..fc88d73076ccf 100644 --- a/package.json +++ b/package.json @@ -160,7 +160,7 @@ "pmtiles": "^2.11.0", "postcss": "^8.4.31", "postcss-preset-env": "^9.3.0", - "posthog-js": "1.194.0", + "posthog-js": "1.194.3", "posthog-js-lite": "3.0.0", "prettier": "^2.8.8", "prop-types": "^15.7.2", diff --git a/plugin-server/src/cdp/hog-executor.ts b/plugin-server/src/cdp/hog-executor.ts index 973163e758582..15e147f022b7f 100644 --- a/plugin-server/src/cdp/hog-executor.ts +++ b/plugin-server/src/cdp/hog-executor.ts @@ -305,6 +305,8 @@ export class HogExecutor { ) : invocation.hogFunction.bytecode) + const eventId = invocation?.globals?.event?.uuid || 'Unknown event' + try { let hogLogs = 0 execRes = execHog(invocationInput, { @@ -354,7 +356,7 @@ export class HogExecutor { result.logs.push({ level: 'warn', timestamp: DateTime.now(), - message: `Function exceeded maximum log entries. No more logs will be collected.`, + message: `Function exceeded maximum log entries. No more logs will be collected. 
Event: ${eventId}`, }) } @@ -410,7 +412,7 @@ export class HogExecutor { result.logs.push({ level: 'error', timestamp: DateTime.now(), - message: `Error executing function: ${e}`, + message: `Error executing function on event ${eventId}: ${e}`, }) throw e } @@ -436,7 +438,7 @@ export class HogExecutor { timestamp: DateTime.now(), message: `Suspending function due to async function call '${execRes.asyncFunctionName}'. Payload: ${ calculateCost(execRes.state) + calculateCost(args) - } bytes`, + } bytes. Event: ${eventId}`, }) if (execRes.asyncFunctionName) { diff --git a/plugin-server/src/cdp/utils.ts b/plugin-server/src/cdp/utils.ts index 6546db471d88f..f4c09b602a514 100644 --- a/plugin-server/src/cdp/utils.ts +++ b/plugin-server/src/cdp/utils.ts @@ -135,12 +135,16 @@ export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationG for (const [_groupType, group] of Object.entries(globals.groups || {})) { groups[`group_${group.index}`] = { + key: group.id, + index: group.index, properties: group.properties, } + groups[_groupType] = groups[`group_${group.index}`] } const elementsChain = globals.event.elements_chain ?? globals.event.properties['$elements_chain'] const response = { + ...groups, event: globals.event.event, elements_chain: elementsChain, elements_chain_href: '', @@ -158,7 +162,6 @@ export function convertToHogFunctionFilterGlobal(globals: HogFunctionInvocationG } : undefined, distinct_id: globals.event.distinct_id, - ...groups, } satisfies HogFunctionFilterGlobals // The elements_chain_* fields are stored as materialized columns in ClickHouse. diff --git a/plugin-server/src/utils/utils.ts b/plugin-server/src/utils/utils.ts index bae7b6e78bc38..ccaf793c21c93 100644 --- a/plugin-server/src/utils/utils.ts +++ b/plugin-server/src/utils/utils.ts @@ -544,7 +544,6 @@ export const KNOWN_LIB_VALUES = new Set([ 'posthog-python', '', 'js', - 'posthog-js-lite', 'posthog-node', 'posthog-react-native', 'posthog-ruby', diff --git a/plugin-server/tests/cdp/cdp-api.test.ts b/plugin-server/tests/cdp/cdp-api.test.ts index 25599334cbc0f..0677c30add0d6 100644 --- a/plugin-server/tests/cdp/cdp-api.test.ts +++ b/plugin-server/tests/cdp/cdp-api.test.ts @@ -174,7 +174,8 @@ describe('CDP API', () => { }, { level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 2110 bytes", + message: + "Suspending function due to async function call 'fetch'. Payload: 2110 bytes. Event: b3a1fe86-b10c-43cc-acaf-d208977608d0", }, { level: 'info', @@ -223,7 +224,8 @@ describe('CDP API', () => { }, { level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 2110 bytes", + message: + "Suspending function due to async function call 'fetch'. Payload: 2110 bytes. Event: b3a1fe86-b10c-43cc-acaf-d208977608d0", }, { level: 'debug', diff --git a/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts b/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts index f86cec16a0bc2..4bd6eb339c5cf 100644 --- a/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts +++ b/plugin-server/tests/cdp/cdp-processed-events-consumer.test.ts @@ -172,7 +172,8 @@ describe('CDP Processed Events Consumer', () => { { topic: 'log_entries_test', value: { - message: "Suspending function due to async function call 'fetch'. Payload: 2035 bytes", + message: + "Suspending function due to async function call 'fetch'. Payload: 2035 bytes. 
Event: b3a1fe86-b10c-43cc-acaf-d208977608d0", log_source_id: fnFetchNoFilters.id, }, }, diff --git a/plugin-server/tests/cdp/hog-executor.test.ts b/plugin-server/tests/cdp/hog-executor.test.ts index 7285e2dbe10a0..aeacc1067d0f4 100644 --- a/plugin-server/tests/cdp/hog-executor.test.ts +++ b/plugin-server/tests/cdp/hog-executor.test.ts @@ -112,7 +112,7 @@ describe('Hog Executor', () => { { timestamp: expect.any(DateTime), level: 'debug', - message: "Suspending function due to async function call 'fetch'. Payload: 1951 bytes", + message: "Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid", }, ]) }) @@ -193,7 +193,7 @@ describe('Hog Executor', () => { expect(logs.map((log) => log.message)).toMatchInlineSnapshot(` Array [ "Executing function", - "Suspending function due to async function call 'fetch'. Payload: 1951 bytes", + "Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid", "Resuming function", "Fetch response:, {\\"status\\":200,\\"body\\":\\"success\\"}", "Function completed in 100ms. Sync: 0ms. Mem: 812 bytes. Ops: 22. Event: 'http://localhost:8000/events/1'", @@ -212,7 +212,7 @@ describe('Hog Executor', () => { expect(logs.map((log) => log.message)).toMatchInlineSnapshot(` Array [ "Executing function", - "Suspending function due to async function call 'fetch'. Payload: 1951 bytes", + "Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid", "Resuming function", "Fetch response:, {\\"status\\":200,\\"body\\":{\\"foo\\":\\"bar\\"}}", "Function completed in 100ms. Sync: 0ms. Mem: 812 bytes. Ops: 22. Event: 'http://localhost:8000/events/1'", @@ -243,7 +243,7 @@ describe('Hog Executor', () => { expect(logs.map((log) => log.message)).toMatchInlineSnapshot(` Array [ "Executing function", - "Suspending function due to async function call 'fetch'. Payload: 1951 bytes", + "Suspending function due to async function call 'fetch'. Payload: 1951 bytes. Event: uuid", "Fetch failed after 1 attempts", "Fetch failure of kind failurestatus with status 404 and message 404 Not Found", "Resuming function", @@ -562,7 +562,7 @@ describe('Hog Executor', () => { expect(result3.error).toEqual('Exceeded maximum number of async steps: 5') expect(result3.logs.map((log) => log.message)).toEqual([ 'Resuming function', - 'Error executing function: HogVMException: Exceeded maximum number of async steps: 5', + 'Error executing function on event uuid: HogVMException: Exceeded maximum number of async steps: 5', ]) }) }) @@ -610,9 +610,9 @@ describe('Hog Executor', () => { 'I AM FIBONACCI', 'I AM FIBONACCI', 'I AM FIBONACCI', - 'Function exceeded maximum log entries. No more logs will be collected.', + 'Function exceeded maximum log entries. No more logs will be collected. Event: uuid', expect.stringContaining( - 'Error executing function: HogVMException: Execution timed out after 0.1 seconds. Performed' + 'Error executing function on event uuid: HogVMException: Execution timed out after 0.1 seconds. 
Performed' ), ]) }) diff --git a/plugin-server/tests/cdp/utils.test.ts b/plugin-server/tests/cdp/utils.test.ts index 6640662b2e79e..c343f8e6461a1 100644 --- a/plugin-server/tests/cdp/utils.test.ts +++ b/plugin-server/tests/cdp/utils.test.ts @@ -1,7 +1,12 @@ import { DateTime } from 'luxon' -import { HogFunctionInvocationResult } from '../../src/cdp/types' -import { gzipObject, prepareLogEntriesForClickhouse, unGzipObject } from '../../src/cdp/utils' +import { HogFunctionInvocationGlobals, HogFunctionInvocationResult } from '../../src/cdp/types' +import { + convertToHogFunctionFilterGlobal, + gzipObject, + prepareLogEntriesForClickhouse, + unGzipObject, +} from '../../src/cdp/utils' import { createHogFunction, createInvocation, insertHogFunction as _insertHogFunction } from './fixtures' describe('Utils', () => { @@ -92,4 +97,65 @@ describe('Utils', () => { `) }) }) + + describe('convertToHogFunctionFilterGlobal', () => { + it('should correctly map groups to response', () => { + const globals: HogFunctionInvocationGlobals = { + project: { + id: 1, + name: 'Test Project', + url: 'http://example.com', + }, + event: { + uuid: 'event_uuid', + event: 'test_event', + distinct_id: 'user_123', + properties: {}, + elements_chain: '', + timestamp: DateTime.now().toISO(), + url: 'http://example.com/event', + }, + person: { + id: 'person_123', + properties: {}, + name: 'Test User', + url: 'http://example.com/person', + }, + groups: { + organization: { + id: 'org_123', + type: 'organization', + index: 0, + properties: { name: 'Acme Corp' }, + url: 'http://example.com/org', + }, + project: { + id: 'proj_456', + type: 'project', + index: 1, + properties: { name: 'Project X' }, + url: 'http://example.com/project', + }, + }, + } + + const response = convertToHogFunctionFilterGlobal(globals) + + // Verify that group_0 and organization are set correctly + expect(response['group_0']).toEqual({ + key: 'org_123', + index: 0, + properties: { name: 'Acme Corp' }, + }) + expect(response['organization']).toBe(response['group_0']) + + // Verify that group_1 and project are set correctly + expect(response['group_1']).toEqual({ + key: 'proj_456', + index: 1, + properties: { name: 'Project X' }, + }) + expect(response['project']).toBe(response['group_1']) + }) + }) }) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e692b7738c7d6..46b9d8ad2f29c 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -302,8 +302,8 @@ dependencies: specifier: ^9.3.0 version: 9.3.0(postcss@8.4.31) posthog-js: - specifier: 1.194.0 - version: 1.194.0 + specifier: 1.194.3 + version: 1.194.3 posthog-js-lite: specifier: 3.0.0 version: 3.0.0 @@ -6952,8 +6952,8 @@ packages: lodash: 4.17.21 dev: true - /@storybook/csf@0.1.11: - resolution: {integrity: sha512-dHYFQH3mA+EtnCkHXzicbLgsvzYjcDJ1JWsogbItZogkPHgSJM/Wr71uMkcvw8v9mmCyP4NpXJuu6bPoVsOnzg==} + /@storybook/csf@0.1.12: + resolution: {integrity: sha512-9/exVhabisyIVL0VxTCxo01Tdm8wefIXKXfltAPTSr8cbLn5JAxGQ6QV3mjdecLGEOucfoVhAKtJfVHxEK1iqw==} dependencies: type-fest: 2.19.0 dev: true @@ -6993,7 +6993,7 @@ packages: '@storybook/channels': 7.6.20 '@storybook/client-logger': 7.6.20 '@storybook/core-events': 7.6.20 - '@storybook/csf': 0.1.11 + '@storybook/csf': 0.1.12 '@storybook/global': 5.0.0 '@storybook/router': 7.6.20 '@storybook/theming': 7.6.20(react-dom@18.2.0)(react@18.2.0) @@ -7108,7 +7108,7 @@ packages: '@storybook/channels': 7.6.20 '@storybook/client-logger': 7.6.20 '@storybook/core-events': 7.6.20 - '@storybook/csf': 0.1.11 + '@storybook/csf': 0.1.12 '@storybook/global': 5.0.0 
'@storybook/types': 7.6.20 '@types/qs': 6.9.17 @@ -14003,7 +14003,7 @@ packages: hogan.js: 3.0.2 htm: 3.1.1 instantsearch-ui-components: 0.3.0 - preact: 10.25.0 + preact: 10.25.1 qs: 6.9.7 search-insights: 2.13.0 dev: false @@ -17822,12 +17822,12 @@ packages: resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==} dev: false - /posthog-js@1.194.0: - resolution: {integrity: sha512-9yjYjHYlT6hh6/OlEJmz/wEjMZI1cRghxmYirwq60uHK93nbbxFP1VE7LqjWFLM8jSTXRw//lpU23EvU7QjvxQ==} + /posthog-js@1.194.3: + resolution: {integrity: sha512-/YFpBMqZzRpywa07QeoaIojdrUDijFajT4gZBSCFUBuZA5BN5xr5S1spsvtpT7E4RjkQSVgRvUngI4W19csgQw==} dependencies: core-js: 3.39.0 fflate: 0.4.8 - preact: 10.25.0 + preact: 10.25.1 web-vitals: 4.2.4 dev: false @@ -17835,8 +17835,8 @@ packages: resolution: {integrity: sha512-Q+/tYsFU9r7xoOJ+y/ZTtdVQwTWfzjbiXBDMM/JKUux3+QPP02iUuIoeBQ+Ot6oEDlC+/PGjB/5A3K7KKb7hcw==} dev: false - /preact@10.25.0: - resolution: {integrity: sha512-6bYnzlLxXV3OSpUxLdaxBmE7PMOu0aR3pG6lryK/0jmvcDFPlcXGQAt5DpK3RITWiDrfYZRI0druyaK/S9kYLg==} + /preact@10.25.1: + resolution: {integrity: sha512-frxeZV2vhQSohQwJ7FvlqC40ze89+8friponWUFeVEkaCfhC6Eu4V0iND5C9CXz8JLndV07QRDeXzH1+Anz5Og==} dev: false /prelude-ls@1.2.1: diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 2a9995ee88df8..c636ac58fcd9e 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -31,7 +31,7 @@ from posthog.models.feature_flag.flag_analytics import increment_request_count from posthog.models.filters.mixins.utils import process_bool from posthog.models.utils import execute_with_timeout -from posthog.plugins.site import get_decide_site_apps +from posthog.plugins.site import get_decide_site_apps, get_decide_site_functions from posthog.utils import ( get_ip_address, label_for_team_id_to_track, @@ -300,6 +300,8 @@ def get_decide(request: HttpRequest): try: with execute_with_timeout(200, DATABASE_FOR_FLAG_MATCHING): site_apps = get_decide_site_apps(team, using_database=DATABASE_FOR_FLAG_MATCHING) + with execute_with_timeout(200, DATABASE_FOR_FLAG_MATCHING): + site_apps += get_decide_site_functions(team, using_database=DATABASE_FOR_FLAG_MATCHING) except Exception: pass diff --git a/posthog/api/hog_function.py b/posthog/api/hog_function.py index 3d82cb367ef8c..339400a45aa78 100644 --- a/posthog/api/hog_function.py +++ b/posthog/api/hog_function.py @@ -18,13 +18,22 @@ from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.api.shared import UserBasicSerializer -from posthog.cdp.filters import compile_filters_bytecode +from posthog.cdp.filters import compile_filters_bytecode, compile_filters_expr from posthog.cdp.services.icons import CDPIconsService from posthog.cdp.templates import HOG_FUNCTION_TEMPLATES_BY_ID from posthog.cdp.validation import compile_hog, generate_template_bytecode, validate_inputs, validate_inputs_schema +from posthog.cdp.site_functions import get_transpiled_function from posthog.constants import AvailableFeature +from posthog.hogql.compiler.javascript import JavaScriptCompiler from posthog.models.activity_logging.activity_log import log_activity, changes_between, Detail -from posthog.models.hog_functions.hog_function import HogFunction, HogFunctionState, TYPES_WITH_COMPILED_FILTERS +from posthog.models.hog_functions.hog_function import ( + HogFunction, + HogFunctionState, + TYPES_WITH_COMPILED_FILTERS, + TYPES_WITH_TRANSPILED_FILTERS, + TYPES_WITH_JAVASCRIPT_SOURCE, +) +from posthog.models.plugin import TranspilerError from 
posthog.plugins.plugin_server_api import create_hog_invocation_test @@ -93,6 +102,7 @@ class Meta: "deleted", "hog", "bytecode", + "transpiled", "inputs_schema", "inputs", "filters", @@ -108,6 +118,7 @@ class Meta: "created_by", "updated_at", "bytecode", + "transpiled", "template", "status", ] @@ -135,7 +146,7 @@ def validate(self, attrs): {"template_id": "The Data Pipelines addon is required to create custom functions."} ) - if template.status != "free": + if template.status != "free" and not instance: raise serializers.ValidationError( {"template_id": "The Data Pipelines addon is required for this template."} ) @@ -144,6 +155,9 @@ def validate(self, attrs): attrs["inputs_schema"] = template.inputs_schema attrs["hog"] = template.hog + if "type" not in attrs: + attrs["type"] = "destination" + if self.context.get("view") and self.context["view"].action == "create": # Ensure we have sensible defaults when created attrs["filters"] = attrs.get("filters") or {} @@ -161,16 +175,36 @@ def validate(self, attrs): existing_encrypted_inputs = instance.encrypted_inputs attrs["inputs_schema"] = attrs.get("inputs_schema", instance.inputs_schema if instance else []) - attrs["inputs"] = validate_inputs(attrs["inputs_schema"], inputs, existing_encrypted_inputs) + attrs["inputs"] = validate_inputs(attrs["inputs_schema"], inputs, existing_encrypted_inputs, attrs["type"]) + + if "filters" in attrs: + if attrs["type"] in TYPES_WITH_COMPILED_FILTERS: + attrs["filters"] = compile_filters_bytecode(attrs["filters"], team) + elif attrs["type"] in TYPES_WITH_TRANSPILED_FILTERS: + compiler = JavaScriptCompiler() + code = compiler.visit(compile_filters_expr(attrs["filters"], team)) + attrs["filters"]["transpiled"] = {"lang": "ts", "code": code, "stl": list(compiler.stl_functions)} + if "bytecode" in attrs["filters"]: + del attrs["filters"]["bytecode"] if "hog" in attrs: - attrs["bytecode"] = compile_hog(attrs["hog"]) - - if "type" not in attrs: - attrs["type"] = "destination" - - if "filters" in attrs and attrs["type"] in TYPES_WITH_COMPILED_FILTERS: - attrs["filters"] = compile_filters_bytecode(attrs["filters"], team) + if attrs["type"] in TYPES_WITH_JAVASCRIPT_SOURCE: + # Upon creation, this code will be run before the model has an "id". + # If that's the case, the code just makes sure transpilation doesn't throw. We'll re-transpile after creation. 
+ id = str(instance.id) if instance else "__" + try: + attrs["transpiled"] = get_transpiled_function( + id, attrs["hog"], attrs["filters"], attrs["inputs"], team + ) + except TranspilerError: + raise serializers.ValidationError({"hog": f"Error in TypeScript code"}) + attrs["bytecode"] = None + else: + attrs["bytecode"] = compile_hog(attrs["hog"]) + attrs["transpiled"] = None + else: + attrs["bytecode"] = None + attrs["transpiled"] = None return super().validate(attrs) @@ -196,7 +230,13 @@ def to_representation(self, data): def create(self, validated_data: dict, *args, **kwargs) -> HogFunction: request = self.context["request"] validated_data["created_by"] = request.user - return super().create(validated_data=validated_data) + hog_function = super().create(validated_data=validated_data) + if validated_data.get("type") in TYPES_WITH_JAVASCRIPT_SOURCE: + # Re-run the transpilation now that we have an ID + hog_function.transpiled = get_transpiled_function( + str(hog_function.id), hog_function.hog, hog_function.filters, hog_function.inputs, hog_function.team + ) + return hog_function def update(self, instance: HogFunction, validated_data: dict, *args, **kwargs) -> HogFunction: res: HogFunction = super().update(instance, validated_data) @@ -231,8 +271,13 @@ def get_serializer_class(self) -> type[BaseSerializer]: def safely_get_queryset(self, queryset: QuerySet) -> QuerySet: if self.action == "list": - type = self.request.GET.get("type", "destination") - queryset = queryset.filter(deleted=False, type=type) + if "type" in self.request.GET: + types = [self.request.GET.get("type", "destination")] + elif "types" in self.request.GET: + types = self.request.GET.get("types", "destination").split(",") + else: + types = ["destination"] + queryset = queryset.filter(deleted=False, type__in=types) if self.request.GET.get("filters"): try: diff --git a/posthog/api/hog_function_template.py b/posthog/api/hog_function_template.py index 2f68614e50a02..2044affa77075 100644 --- a/posthog/api/hog_function_template.py +++ b/posthog/api/hog_function_template.py @@ -33,8 +33,12 @@ class PublicHogFunctionTemplateViewSet(viewsets.GenericViewSet): serializer_class = HogFunctionTemplateSerializer def list(self, request: Request, *args, **kwargs): - type = self.request.GET.get("type", "destination") - templates = [item for item in HOG_FUNCTION_TEMPLATES if item.type == type] + types = ["destination"] + if "type" in request.GET: + types = [self.request.GET.get("type", "destination")] + elif "types" in request.GET: + types = self.request.GET.get("types", "destination").split(",") + templates = [item for item in HOG_FUNCTION_TEMPLATES if item.type in types] page = self.paginate_queryset(templates) serializer = self.get_serializer(page, many=True) return self.get_paginated_response(serializer.data) diff --git a/posthog/api/proxy_record.py b/posthog/api/proxy_record.py index 42e664f4989ee..552835aa9562f 100644 --- a/posthog/api/proxy_record.py +++ b/posthog/api/proxy_record.py @@ -1,12 +1,15 @@ import asyncio import hashlib +import posthoganalytics from django.conf import settings from rest_framework import serializers, status from rest_framework.viewsets import ModelViewSet from posthog.api.routing import TeamAndOrgViewSetMixin from posthog.constants import GENERAL_PURPOSE_TASK_QUEUE +from posthog.event_usage import groups from posthog.models import ProxyRecord +from posthog.models.organization import Organization from posthog.permissions import OrganizationAdminWritePermissions from posthog.temporal.common.client import 
sync_connect from posthog.temporal.proxy_service import CreateManagedProxyInputs, DeleteManagedProxyInputs @@ -22,6 +25,20 @@ def generate_target_cname(organization_id, domain) -> str: return f"{digest}.{settings.PROXY_BASE_CNAME}" +def _capture_proxy_event(request, record: ProxyRecord, event_type: str) -> None: + organization = Organization.objects.get(id=record.organization_id) + posthoganalytics.capture( + request.user.distinct_id, + f"managed reverse proxy {event_type}", + properties={ + "proxy_record_id": record.id, + "domain": record.domain, + "target_cname": record.target_cname, + }, + groups=groups(organization), + ) + + class ProxyRecordSerializer(serializers.ModelSerializer): class Meta: model = ProxyRecord @@ -76,6 +93,7 @@ def create(self, request, *args, **kwargs): ) serializer = self.get_serializer(record) + _capture_proxy_event(request, record, "created") return Response(serializer.data) def destroy(self, request, *args, pk=None, **kwargs): @@ -106,6 +124,8 @@ def destroy(self, request, *args, pk=None, **kwargs): record.status = ProxyRecord.Status.DELETING record.save() + _capture_proxy_event(request, record, "deleted") + return Response( {"success": True}, status=status.HTTP_200_OK, diff --git a/posthog/api/site_app.py b/posthog/api/site_app.py index 6704ff0c7f534..40e1df08ea778 100644 --- a/posthog/api/site_app.py +++ b/posthog/api/site_app.py @@ -8,6 +8,7 @@ from posthog.exceptions import generate_exception_response from posthog.logging.timing import timed +from posthog.models.hog_functions.hog_function import HogFunction from posthog.plugins.site import get_site_config_from_schema, get_transpiled_site_source @@ -36,3 +37,34 @@ def get_site_app(request: HttpRequest, id: int, token: str, hash: str) -> HttpRe type="server_error", status_code=status.HTTP_404_NOT_FOUND, ) + + +@csrf_exempt +@timed("posthog_cloud_site_app_endpoint") +def get_site_function(request: HttpRequest, id: str, hash: str) -> HttpResponse: + try: + # TODO: Should we add a token as well? Is the UUID enough? 
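+ # Serves the stored, pre-transpiled JavaScript for an enabled site_app / site_destination
+ # function, looked up by UUID; the response is publicly cacheable for a year, and any
+ # missing or disabled function falls through to the 404 handling below.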
+ function = ( + HogFunction.objects.filter( + id=id, enabled=True, type__in=("site_destination", "site_app"), transpiled__isnull=False + ) + .values_list("transpiled") + .first() + ) + if not function: + raise Exception("No function found") + + response = HttpResponse(content=function[0], content_type="application/javascript") + response["Cache-Control"] = "public, max-age=31536000" # Cache for 1 year + statsd.incr(f"posthog_cloud_raw_endpoint_success", tags={"endpoint": "site_function"}) + return response + except Exception as e: + capture_exception(e, {"data": {"id": id}}) + statsd.incr("posthog_cloud_raw_endpoint_failure", tags={"endpoint": "site_function"}) + return generate_exception_response( + "site_function", + "Unable to serve site function source code.", + code="missing_site_function_source", + type="server_error", + status_code=status.HTTP_404_NOT_FOUND, + ) diff --git a/posthog/api/test/__snapshots__/test_action.ambr b/posthog/api/test/__snapshots__/test_action.ambr index 2b453c3a24b20..8ac1823a033c1 100644 --- a/posthog/api/test/__snapshots__/test_action.ambr +++ b/posthog/api/test/__snapshots__/test_action.ambr @@ -156,12 +156,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -471,12 +471,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -667,12 +667,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '99' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL diff --git a/posthog/api/test/__snapshots__/test_annotation.ambr 
b/posthog/api/test/__snapshots__/test_annotation.ambr index 457607fc74fc8..9340e03a2a4d8 100644 --- a/posthog/api/test/__snapshots__/test_annotation.ambr +++ b/posthog/api/test/__snapshots__/test_annotation.ambr @@ -144,12 +144,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -454,12 +454,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -669,12 +669,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '107' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr index a7be20804014f..a1f1ebcced5c8 100644 --- a/posthog/api/test/__snapshots__/test_decide.ambr +++ b/posthog/api/test/__snapshots__/test_decide.ambr @@ -260,6 +260,7 @@ "posthog_hogfunction"."icon_url", "posthog_hogfunction"."hog", "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", "posthog_hogfunction"."inputs_schema", "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", @@ -510,6 +511,19 @@ AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- +# name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.25 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."type" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."transpiled" IS NOT NULL + AND "posthog_hogfunction"."type" IN 
('site_destination', + 'site_app')) + ''' +# --- # name: TestDecide.test_decide_doesnt_error_out_when_database_is_down.3 ''' SELECT "posthog_team"."id", @@ -740,12 +754,12 @@ INNER JOIN "posthog_team" ON ("ee_accesscontrol"."team_id" = "posthog_team"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '253' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '253' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -785,6 +799,7 @@ "posthog_hogfunction"."icon_url", "posthog_hogfunction"."hog", "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", "posthog_hogfunction"."inputs_schema", "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", @@ -1082,6 +1097,7 @@ "posthog_hogfunction"."icon_url", "posthog_hogfunction"."hog", "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", "posthog_hogfunction"."inputs_schema", "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", @@ -1473,6 +1489,7 @@ "posthog_hogfunction"."icon_url", "posthog_hogfunction"."hog", "posthog_hogfunction"."bytecode", + "posthog_hogfunction"."transpiled", "posthog_hogfunction"."inputs_schema", "posthog_hogfunction"."inputs", "posthog_hogfunction"."encrypted_inputs", @@ -1580,3 +1597,16 @@ AND "posthog_pluginconfig"."team_id" = 99999) ''' # --- +# name: TestDecide.test_web_app_queries.6 + ''' + SELECT "posthog_hogfunction"."id", + "posthog_hogfunction"."updated_at", + "posthog_hogfunction"."type" + FROM "posthog_hogfunction" + WHERE ("posthog_hogfunction"."enabled" + AND "posthog_hogfunction"."team_id" = 99999 + AND "posthog_hogfunction"."transpiled" IS NOT NULL + AND "posthog_hogfunction"."type" IN ('site_destination', + 'site_app')) + ''' +# --- diff --git a/posthog/api/test/__snapshots__/test_element.ambr b/posthog/api/test/__snapshots__/test_element.ambr index 414a8a1831062..e3ce7d60cebca 100644 --- a/posthog/api/test/__snapshots__/test_element.ambr +++ b/posthog/api/test/__snapshots__/test_element.ambr @@ -151,12 +151,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '272' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '272' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr index 93dfa76ea2cdb..b51af7a796f7d 100644 --- a/posthog/api/test/__snapshots__/test_feature_flag.ambr +++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr @@ -2001,12 +2001,12 @@ LEFT OUTER JOIN 
"posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '313' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '313' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -2021,12 +2021,12 @@ AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'feature_flag' - AND "ee_accesscontrol"."resource_id" = '130' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'feature_flag' - AND "ee_accesscontrol"."resource_id" = '130' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999)) ''' diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr index b6d0b7945dd2d..01390b5f4b341 100644 --- a/posthog/api/test/__snapshots__/test_insight.ambr +++ b/posthog/api/test/__snapshots__/test_insight.ambr @@ -1380,12 +1380,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '441' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '441' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -1493,12 +1493,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '441' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '441' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index 7d964fa88087f..dfd916657a89b 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -332,12 +332,12 @@ 
LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -352,12 +352,12 @@ AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '1' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '1' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999)) ''' @@ -1057,12 +1057,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -2298,12 +2298,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -2402,12 +2402,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND 
"ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -2530,52 +2530,52 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '55' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '55' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '56' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '56' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '57' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '57' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '58' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '58' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '59' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '59' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999)) ''' @@ -3459,12 +3459,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND 
"ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -4634,12 +4634,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -4654,12 +4654,12 @@ AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '60' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '60' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999)) ''' @@ -5590,12 +5590,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -6140,12 +6140,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -6220,12 +6220,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") 
WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -7580,12 +7580,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -8592,12 +8592,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -9328,12 +9328,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -9348,12 +9348,12 @@ AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '67' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '67' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND 
"ee_accesscontrol"."team_id" = 99999)) ''' @@ -10348,12 +10348,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -10898,12 +10898,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -10978,12 +10978,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '76' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -11186,22 +11186,22 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '69' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '69' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '70' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'dashboard' - AND "ee_accesscontrol"."resource_id" = '70' 
+ AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999)) ''' diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr index 6527545701a76..f585776717839 100644 --- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr +++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr @@ -110,12 +110,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -552,12 +552,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -719,12 +719,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '83' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py index f2201798366e3..5af8e61dc3068 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -38,6 +38,7 @@ from posthog.models.cohort.cohort import Cohort from posthog.models.feature_flag.feature_flag import FeatureFlagHashKeyOverride from posthog.models.group.group import Group +from posthog.models.hog_functions.hog_function import HogFunction from posthog.models.organization import Organization, OrganizationMembership from posthog.models.person import PersonDistinctId from posthog.models.personal_api_key import hash_key_value @@ -663,7 +664,7 @@ def test_web_app_queries(self, *args): # caching flag definitions in the above mean fewer queries # 3 of 
these queries are just for setting transaction scope - with self.assertNumQueries(4): + with self.assertNumQueries(8): response = self._post_decide() self.assertEqual(response.status_code, status.HTTP_200_OK) injected = response.json()["siteApps"] @@ -688,13 +689,52 @@ def test_site_app_injection(self, *args): ) self.team.refresh_from_db() self.assertTrue(self.team.inject_web_apps) - with self.assertNumQueries(5): + with self.assertNumQueries(9): response = self._post_decide() self.assertEqual(response.status_code, status.HTTP_200_OK) injected = response.json()["siteApps"] self.assertEqual(len(injected), 1) self.assertTrue(injected[0]["url"].startswith(f"/site_app/{plugin_config.id}/{plugin_config.web_token}/")) + def test_site_function_injection(self, *args): + # type: site_app + site_app = HogFunction.objects.create( + team=self.team, + name="my_function", + hog="function onLoad(){}", + type="site_app", + transpiled="function onLoad(){}", + enabled=True, + ) + + self.team.refresh_from_db() + self.assertTrue(self.team.inject_web_apps) + with self.assertNumQueries(9): + response = self._post_decide() + self.assertEqual(response.status_code, status.HTTP_200_OK) + injected = response.json()["siteApps"] + self.assertEqual(len(injected), 1) + self.assertTrue(injected[0]["url"].startswith(f"/site_function/{site_app.id}/")) + + # type: site_destination + site_destination = HogFunction.objects.create( + team=self.team, + name="my_function", + hog="function onLoad(){}", + type="site_destination", + transpiled="function onLoad(){}", + enabled=True, + ) + + self.team.refresh_from_db() + self.assertTrue(self.team.inject_web_apps) + with self.assertNumQueries(8): + response = self._post_decide() + self.assertEqual(response.status_code, status.HTTP_200_OK) + injected = response.json()["siteApps"] + self.assertEqual(len(injected), 2) + self.assertTrue(injected[1]["url"].startswith(f"/site_function/{site_destination.id}/")) + def test_feature_flags(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() @@ -4690,7 +4730,7 @@ def test_site_apps_in_decide_use_replica(self, mock_is_connected): # update caches self._post_decide(api_version=3) - with self.assertNumQueries(4, using="replica"), self.assertNumQueries(0, using="default"): + with self.assertNumQueries(8, using="replica"), self.assertNumQueries(0, using="default"): response = self._post_decide(api_version=3) self.assertEqual(response.status_code, status.HTTP_200_OK) injected = response.json()["siteApps"] diff --git a/posthog/api/test/test_hog_function.py b/posthog/api/test/test_hog_function.py index 18323f7a2341b..55788bb058191 100644 --- a/posthog/api/test/test_hog_function.py +++ b/posthog/api/test/test_hog_function.py @@ -114,7 +114,7 @@ def test_free_users_cannot_use_without_template(self): assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() assert response.json()["detail"] == "The Data Pipelines addon is required to create custom functions." - def test_free_users_cannot_use_non_free_templates(self): + def test_free_users_cannot_create_non_free_templates(self): response = self._create_slack_function( { "template_id": template_webhook.id, @@ -124,6 +124,43 @@ def test_free_users_cannot_use_non_free_templates(self): assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() assert response.json()["detail"] == "The Data Pipelines addon is required for this template."
+ def test_free_users_can_update_non_free_templates(self): + self.organization.available_product_features = [ + {"key": AvailableFeature.DATA_PIPELINES, "name": AvailableFeature.DATA_PIPELINES} + ] + self.organization.save() + + response = self._create_slack_function( + { + "name": template_webhook.name, + "template_id": template_webhook.id, + "inputs": { + "url": {"value": "https://example.com"}, + }, + } + ) + + assert response.json()["template"]["status"] == template_webhook.status + + self.organization.available_product_features = [] + self.organization.save() + + payload = { + "name": template_webhook.name, + "template_id": template_webhook.id, + "inputs": { + "url": {"value": "https://example.com/posthog-webhook-updated"}, + }, + } + + update_response = self.client.patch( + f"/api/projects/{self.team.id}/hog_functions/{response.json()['id']}/", + data=payload, + ) + + assert update_response.status_code == status.HTTP_200_OK, update_response.json() + assert update_response.json()["inputs"]["url"]["value"] == "https://example.com/posthog-webhook-updated" + class TestHogFunctionAPI(ClickhouseTestMixin, APIBaseTest, QueryMatchingTest): def setUp(self): @@ -174,6 +211,7 @@ def test_create_hog_function(self, *args): "enabled": False, "hog": "fetch(inputs.url);", "bytecode": ["_H", HOGQL_BYTECODE_VERSION, 32, "url", 32, "inputs", 1, 2, 2, "fetch", 1, 35], + "transpiled": None, "inputs_schema": [], "inputs": {}, "filters": {"bytecode": ["_H", HOGQL_BYTECODE_VERSION, 29]}, @@ -985,3 +1023,99 @@ def test_list_with_type_filter(self, *args): response = self.client.get(f"/api/projects/{self.team.id}/hog_functions/?type=email") assert len(response.json()["results"]) == 1 + + response = self.client.get(f"/api/projects/{self.team.id}/hog_functions/?types=destination,email") + assert len(response.json()["results"]) == 2 + + def test_create_hog_function_with_site_app_type(self): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={ + "name": "Site App Function", + "hog": "export function onLoad() { console.log('Hello, site_app'); }", + "type": "site_app", + }, + ) + + assert response.status_code == status.HTTP_201_CREATED, response.json() + assert response.json()["bytecode"] is None + assert "Hello, site_app" in response.json()["transpiled"] + + def test_create_hog_function_with_site_destination_type(self): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={ + "name": "Site Destination Function", + "hog": "export function onLoad() { console.log('Hello, site_destination'); }", + "type": "site_destination", + }, + ) + + assert response.status_code == status.HTTP_201_CREATED, response.json() + assert response.json()["bytecode"] is None + assert "Hello, site_destination" in response.json()["transpiled"] + + def test_transpiled_field_not_populated_for_other_types(self): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={ + "name": "Regular Function", + "hog": "fetch(inputs.url);", + "type": "destination", + }, + ) + + assert response.status_code == status.HTTP_201_CREATED, response.json() + assert response.json()["bytecode"] is not None + assert response.json()["transpiled"] is None + + def test_create_hog_function_with_invalid_typescript(self): + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data={ + "name": "Invalid Site App Function", + "hog": "export function onLoad() { console.log('Missing closing brace');", + "type": "site_app", + }, + ) + + assert 
response.status_code == status.HTTP_400_BAD_REQUEST, response.json() + assert "detail" in response.json() + assert "Error in TypeScript code" in response.json()["detail"] + + def test_create_typescript_destination_with_inputs(self): + payload = { + "name": "TypeScript Destination Function", + "hog": "export function onLoad() { console.log(inputs.message); }", + "type": "site_destination", + "inputs_schema": [ + {"key": "message", "type": "string", "label": "Message", "required": True}, + ], + "inputs": { + "message": { + "value": "Hello, TypeScript {arrayMap(a -> a, [1, 2, 3])}!", + }, + }, + } + + response = self.client.post( + f"/api/projects/{self.team.id}/hog_functions/", + data=payload, + ) + result = response.json() + + assert response.status_code == status.HTTP_201_CREATED, response.json() + assert result["bytecode"] is None + assert "Hello, TypeScript" in result["transpiled"] + inputs = result["inputs"] + inputs["message"]["transpiled"]["stl"].sort() + assert result["inputs"] == { + "message": { + "transpiled": { + "code": 'concat("Hello, TypeScript ", arrayMap(__lambda((a) => a), [1, 2, 3]), "!")', + "lang": "ts", + "stl": sorted(["__lambda", "concat", "arrayMap"]), + }, + "value": "Hello, TypeScript {arrayMap(a -> a, [1, 2, 3])}!", + } + } diff --git a/posthog/api/test/test_hog_function_templates.py b/posthog/api/test/test_hog_function_templates.py index cd9479a10b456..956be4de638a9 100644 --- a/posthog/api/test/test_hog_function_templates.py +++ b/posthog/api/test/test_hog_function_templates.py @@ -40,6 +40,12 @@ def test_filter_function_templates(self): assert response2.json()["results"] == response3.json()["results"] assert len(response2.json()["results"]) > 5 + response4 = self.client.get("/api/projects/@current/hog_function_templates/?type=site_destination") + assert len(response4.json()["results"]) > 0 + + response5 = self.client.get("/api/projects/@current/hog_function_templates/?types=site_destination,destination") + assert len(response5.json()["results"]) > 0 + def test_public_list_function_templates(self): self.client.logout() response = self.client.get("/api/public_hog_function_templates/") diff --git a/posthog/api/test/test_site_app.py b/posthog/api/test/test_site_app.py index 92340e67144bd..23408defe752f 100644 --- a/posthog/api/test/test_site_app.py +++ b/posthog/api/test/test_site_app.py @@ -2,7 +2,7 @@ from rest_framework import status from posthog.api.site_app import get_site_config_from_schema -from posthog.models import Plugin, PluginConfig, PluginSourceFile +from posthog.models import Plugin, PluginConfig, PluginSourceFile, HogFunction from posthog.test.base import BaseTest @@ -82,3 +82,32 @@ def test_get_site_config_from_schema(self): config = {"in_site": "123", "not_in_site": "12345"} self.assertEqual(get_site_config_from_schema(schema, config), {"in_site": "123"}) self.assertEqual(get_site_config_from_schema(None, None), {}) + + def test_site_function(self): + # Create a HogFunction object + hog_function = HogFunction.objects.create( + enabled=True, + team=self.team, + type="site_app", + transpiled="function test() {}", + ) + + response = self.client.get( + f"/site_function/{hog_function.id}/somehash/", + HTTP_ORIGIN="http://127.0.0.1:8000", + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.content.decode("utf-8"), hog_function.transpiled) + self.assertEqual(response["Cache-Control"], "public, max-age=31536000") + + def test_site_function_not_found(self): + response = self.client.get( + 
f"/site_function/non-existent-id/somehash/", + HTTP_ORIGIN="http://127.0.0.1:8000", + ) + + self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) + response_json = response.json() + self.assertEqual(response_json["code"], "missing_site_function_source") + self.assertEqual(response_json["detail"], "Unable to serve site function source code.") diff --git a/posthog/cdp/filters.py b/posthog/cdp/filters.py index 04fde83e5c532..f95d3f029a0f3 100644 --- a/posthog/cdp/filters.py +++ b/posthog/cdp/filters.py @@ -40,7 +40,10 @@ def hog_function_filters_to_expr(filters: dict, team: Team, actions: dict[int, A # Actions if filter.get("type") == "actions": try: - action = actions[int(filter["id"])] + action_id = int(filter["id"]) + action = actions.get(action_id, None) + if not action: + action = Action.objects.get(id=action_id, team=team) exprs.append(action_to_expr(action)) except KeyError: # If an action doesn't exist, we want to return no events diff --git a/posthog/cdp/site_functions.py b/posthog/cdp/site_functions.py new file mode 100644 index 0000000000000..6f4f7a3d5ffa5 --- /dev/null +++ b/posthog/cdp/site_functions.py @@ -0,0 +1,102 @@ +import json + +from posthog.cdp.filters import hog_function_filters_to_expr +from posthog.cdp.validation import transpile_template_code +from posthog.hogql.compiler.javascript import JavaScriptCompiler +from posthog.models.plugin import transpile +from posthog.models.team.team import Team + + +def get_transpiled_function(id: str, source: str, filters: dict, inputs: dict, team: Team) -> str: + # Wrap in IIFE = Immediately Invoked Function Expression = to avoid polluting global scope + response = "(function() {\n\n" + + # PostHog-JS adds itself to the window object for us to use + response += f"const posthog = window['__$$ph_site_app_{id}_posthog'] || window['__$$ph_site_app_{id}'] || window['posthog'];\n" + response += f"const missedInvocations = window['__$$ph_site_app_{id}_missed_invocations'] || (() => []);\n" + response += f"const callback = window['__$$ph_site_app_{id}_callback'] || (() => {'{}'});\n" + + # Build the inputs in three parts: + # 1) a simple object with constants/scalars + inputs_object: list[str] = [] + # 2) a function with a switch + try/catch that calculates the input from globals + inputs_switch = "" + # 3) code that adds all calculated inputs to the inputs object + inputs_append: list[str] = [] + + compiler = JavaScriptCompiler() + + # TODO: reorder inputs to make dependencies work + for key, input in inputs.items(): + value = input.get("value") + key_string = json.dumps(str(key) or "") + if (isinstance(value, str) and "{" in value) or isinstance(value, dict) or isinstance(value, list): + base_code = transpile_template_code(value, compiler) + inputs_switch += f"case {key_string}: return {base_code};\n" + inputs_append.append(f"inputs[{key_string}] = getInputsKey({json.dumps(key)});") + else: + inputs_object.append(f"{key_string}: {json.dumps(value)}") + + # Convert the filters to code + filters_expr = hog_function_filters_to_expr(filters, team, {}) + filters_code = compiler.visit(filters_expr) + + # Start with the STL functions + response += compiler.get_stl_code() + "\n" + + # A function to calculate the inputs from globals. If "initial" is true, no errors are logged. 
+ response += "function buildInputs(globals, initial) {\n" + + # Add all constant inputs directly + response += "let inputs = {\n" + (",\n".join(inputs_object)) + "};\n" + + # Transpiled Hog code needs a "__getGlobal" function in scope + response += "let __getGlobal = (key) => key === 'inputs' ? inputs : globals[key];\n" + + if inputs_switch: + # We do it this way to be resilient to errors + response += "function getInputsKey(key, initial) { try { switch (key) {\n" + response += inputs_switch + response += "default: return null; }\n" + response += "} catch (e) { if(!initial) {console.error('[POSTHOG-JS] Unable to compute value for inputs', key, e);} return null } }\n" + response += "\n".join(inputs_append) + "\n" + response += "return inputs;}\n" + + # See plugin-transpiler/src/presets.ts + # transpile(source, 'site') == `(function () {let exports={};${code};return exports;})` + response += f"const response = {transpile(source, 'site')}();" + + response += ( + """ + function processEvent(globals) { + if (!('onEvent' in response)) { return; }; + const inputs = buildInputs(globals); + const filterGlobals = { ...globals.groups, ...globals.event, person: globals.person, inputs, pdi: { distinct_id: globals.event.distinct_id, person: globals.person } }; + let __getGlobal = (key) => filterGlobals[key]; + const filterMatches = """ + + filters_code + + """; + if (filterMatches) { response.onEvent({ ...globals, inputs, posthog }); } + } + if ('onLoad' in response) { + const r = response.onLoad({ inputs: buildInputs({}, true), posthog: posthog }); + const done = (success = true) => { + if (success) { + missedInvocations().forEach(processEvent); + posthog.on('eventCaptured', (event) => { processEvent(posthog.siteApps.globalsForEvent(event)) }); + } else { + console.error('[POSTHOG-JS] Site function failed to load', response) + } + callback(success); + }; + if (r && typeof r.then === 'function' && typeof r.finally === 'function') { r.catch(() => done(false)).then(() => done(true)) } else { done(true) } + } else if ('onEvent' in response) { + missedInvocations().forEach(processEvent); + posthog.on('eventCaptured', (event) => { processEvent(posthog.siteApps.globalsForEvent(event)) }) + } + """ + ) + + response += "\n})();" + + return response diff --git a/posthog/cdp/templates/__init__.py b/posthog/cdp/templates/__init__.py index e19593c3ef2d1..3434fab954d16 100644 --- a/posthog/cdp/templates/__init__.py +++ b/posthog/cdp/templates/__init__.py @@ -20,7 +20,7 @@ from .zapier.template_zapier import template as zapier from .mailgun.template_mailgun import template_mailgun_send_email as mailgun from .avo.template_avo import template as avo, TemplateAvoMigrator -from .loops.template_loops import template as loops, TemplateLoopsMigrator +from .loops.template_loops import template as loops, template_send_event as loops_send_event, TemplateLoopsMigrator from .rudderstack.template_rudderstack import template as rudderstack, TemplateRudderstackMigrator from .gleap.template_gleap import template as gleap from .google_pubsub.template_google_pubsub import template as google_pubsub, TemplateGooglePubSubMigrator @@ -40,10 +40,17 @@ ) from .airtable.template_airtable import template as airtable from .brevo.template_brevo import template as brevo +from ._siteapps.template_early_access_features import template as early_access_features +from ._siteapps.template_hogdesk import template as hogdesk +from ._siteapps.template_notification_bar import template as notification_bar +from ._siteapps.template_pineapple_mode import 
template as pineapple_mode from ._internal.template_broadcast import template_new_broadcast as _broadcast +from ._internal.template_blank import blank_site_destination, blank_site_app HOG_FUNCTION_TEMPLATES = [ _broadcast, + blank_site_destination, + blank_site_app, slack, webhook, activecampaign, @@ -69,6 +76,7 @@ klaviyo_user, knock, loops, + loops_send_event, mailchimp, mailgun, mailjet_create_contact, @@ -84,6 +92,10 @@ sendgrid, zapier, zendesk, + early_access_features, + hogdesk, + notification_bar, + pineapple_mode, ] diff --git a/posthog/cdp/templates/_internal/template_blank.py b/posthog/cdp/templates/_internal/template_blank.py new file mode 100644 index 0000000000000..4f141ad9fc3e8 --- /dev/null +++ b/posthog/cdp/templates/_internal/template_blank.py @@ -0,0 +1,77 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +blank_site_destination: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_destination", + id="template-blank-site-destination", + name="New client-side destination", + description="Run code on your website when an event is sent to PostHog. Works only with posthog-js when opt_in_site_apps is set to true.", + icon_url="/static/hedgehog/builder-hog-01.png", + category=["Custom", "Analytics"], + hog=""" +export async function onLoad({ inputs, posthog }) { + console.log('🦔 Loading (takes 1 sec)', { inputs }) + // onEvent will not be called until this function resolves + await new Promise((resolve) => window.setTimeout(resolve, 1000)) + console.log("🦔 Script loaded") +} +export function onEvent({ posthog, ...globals }) { + const { event, person } = globals + console.log(`🦔 Sending event: ${event.event}`, globals) +} +""".strip(), + inputs_schema=[ + { + "key": "name", + "type": "string", + "label": "Name", + "description": "What's your name?", + "default": "Max", + }, + { + "key": "userId", + "type": "string", + "label": "User ID", + "description": "User ID", + "default": "{event.distinct_id}", + "secret": False, + "required": True, + }, + { + "key": "additionalProperties", + "type": "json", + "label": "Additional properties", + "description": "Additional properties for the Exported Object.", + "default": { + "email": "{person.properties.email}", + "browser": "{event.properties.$browser}", + }, + "secret": False, + "required": True, + }, + ], +) + +blank_site_app: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + id="template-blank-site-app", + name="New site app", + description="Run custom JavaScript on your website. 
Works only with posthog-js when opt_in_site_apps is set to true.", + icon_url="/static/hedgehog/builder-hog-03.png", + category=["Custom", "Analytics"], + hog=""" +export function onLoad({ inputs, posthog }) { + console.log(`Hello ${inputs.name} from your new Site App!`) +} +""".strip(), + inputs_schema=[ + { + "key": "name", + "type": "string", + "label": "Name", + "description": "What's your name?", + "default": "Max", + }, + ], +) diff --git a/posthog/cdp/templates/_siteapps/template_early_access_features.py b/posthog/cdp/templates/_siteapps/template_early_access_features.py new file mode 100644 index 0000000000000..8031e437ed542 --- /dev/null +++ b/posthog/cdp/templates/_siteapps/template_early_access_features.py @@ -0,0 +1,427 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + id="template-early-access-features", + name="Early Access Features App", + description="This app is used with Early Access Feature Management", + icon_url="https://raw.githubusercontent.com/PostHog/early-access-features-app/refs/heads/main/logo.png", + category=["Custom"], + hog=""" +const style = (inputs) => ` + .list-container { + flex: 1; + flex-direction: row; + overflow-y: auto; + } + + .info { + flex: 2; + } + + .list-item { + padding: 15px 30px; + height: 35%; + display: flex; + flex-direction: row; + align-items: center; + justify-content: space-between; + border-bottom: 1px solid #00000026; + + .list-item-name { + font-size: 18px; + } + + .list-item-description { + font-size: 14px; + } + + .list-item-documentation-link { + margin-top: 15px; + + .label { + text-decoration: none; + } + } + } + + .list-content { + margin-right: 20px; + } + + .beta-feature-button { + position: fixed; + bottom: 20px; + right: 20px; + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + text-align: left; + z-index: ${parseInt(inputs.zIndex) || 99999}; + display: flex; + justify-content: center; + align-items: center; + } + + .top-section { + padding: 15px 30px; + display: flex; + flex-direction: row; + align-items: center; + justify-content: space-between; + border-bottom: 1px solid #00000026; + } + + .beta-list-cancel { + cursor: pointer; + } + + .title { + font-size: 16px; + font-weight: bold; + } + + .popup { + position: fixed; + top: 50%; + left: 50%; + color: black; + transform: translate(-50%, -50%); + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + text-align: left; + z-index: ${parseInt(inputs.zIndex) || 99999}; + + display: none; + flex-direction: column; + background: white; + border: 1px solid #f0f0f0; + border-radius: 8px; + padding-top: 5px; + width: 40rem; + height: 50%; + box-shadow: -6px 0 16px -8px rgb(0 0 0 / 8%), -9px 0 28px 0 rgb(0 0 0 / 5%), -12px 0 48px 16px rgb(0 0 0 / 3%); + } + + .beta-feature-button { + width: 64px; + height: 64px; + border-radius: 100%; + text-align: center; + line-height: 60px; + font-size: 32px; + border: none; + cursor: pointer; + } + .beta-feature-button:hover { + filter: brightness(1.2); + } + + .empty-prompt { + flex: 1; + text-align: center; + margin-top: 20px; + } + + /* The switch - the box around the slider */ + .switch { + margin-left: 10px; + margin-right: 10px; + 
position: relative; + display: inline-block; + min-width: 50px; + height: 24px; + } + + /* Hide default HTML checkbox */ + .switch input { + opacity: 0; + width: 0; + height: 0; + } + + /* The slider */ + .slider { + position: absolute; + cursor: pointer; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: #00000026; + -webkit-transition: .4s; + transition: background-color .4s; + cursor: pointer; + } + + .slider:before { + position: absolute; + content: ""; + height: 20px; + width: 20px; + left: -10px; + bottom: -6px; + background-color: #ffffff; + -webkit-transition: .2s; + transition: .2s; + border: 2px solid #00000026; + } + + input:checked + .slider { + background-color: #00000026; + } + + input:focus + .slider { + box-shadow: 0 0 1px #00000026; + } + + input:checked + .slider:before { + -webkit-transform: translateX(26px); + -ms-transform: translateX(26px); + transform: translateX(26px); + background-color: #1d4aff; + } + + /* Rounded sliders */ + .slider.round { + border-radius: 20px; + height: 10px; + width: 30px; + background-color: #00000026; + } + + .slider.round:before { + border-radius: 50%; + } + + .loader-container { + display: flex; + justify-content: center; + align-items: center; + height: 50%; + width: 100%; + } + + .loader { + border: 8px solid #00000026; /* Light grey */ + border-top: 8px solid #1d4aff; /* Blue */ + border-radius: 50%; + width: 60px; + height: 60px; + animation: spin 2s linear infinite; + } + + @keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } + } +` + +interface PreviewItem { + name: string + description: string + flagKey: string + documentationUrl: string +} + +export function onLoad({ inputs, posthog }) { + if (inputs.domains) { + const domains = inputs.domains.split(',').map((domain) => domain.trim()) + if (domains.length > 0 && domains.indexOf(window.location.hostname) === -1) { + return + } + } + const shadow = createShadow(style(inputs)) + + function optIn(flagKey: string) { + posthog.updateEarlyAccessFeatureEnrollment(flagKey, true) + } + + function optOut(flagKey: string) { + posthog.updateEarlyAccessFeatureEnrollment(flagKey, false) + } + + function openbugBox() { + posthog.getEarlyAccessFeatures((previewItemData) => { + const betaListContainer = shadow.getElementById('list-container') + if (betaListContainer) { + const previewItems = listItemComponents(previewItemData) + const previewList = previewItems + ? ` +
    + ${previewItems} +
    + ` + : ` +
    + No beta features available +
    + ` + betaListContainer.innerHTML = previewList + + previewItemData.forEach((item, index) => { + const checkbox = shadow.querySelector('.checkbox-' + index) + checkbox?.addEventListener('click', (e) => { + if (e.target?.checked) { + optIn(item.flagKey) + } else { + optOut(item.flagKey) + } + }) + }) + } + }, true) // Force reload always + + Object.assign(listElement.style, { display: 'flex' }) + + const closeButton = shadow.querySelector('.beta-list-cancel') + closeButton?.addEventListener('click', (e) => { + e.preventDefault() + Object.assign(listElement.style, { display: 'none' }) + }) + + // // Hide when clicked outside + // const _betaList = document.getElementById('beta-list') + // document.addEventListener('click', function(event) { + // const isClickInside = _betaList?.contains(event.target) + + // if (!isClickInside) { + // // Object.assign(formElement.style, { display: 'none' }) + // } + // }); + } + + // TODO: Make this button a inputs option + const buttonElement = Object.assign(document.createElement('button'), { + className: 'beta-feature-button', + onclick: openbugBox, + title: inputs.buttonTitle || '', + }) + + buttonElement.innerHTML = ` + + + + + + ` + + Object.assign(buttonElement.style, { + color: inputs.buttonColor || 'white', + background: inputs.buttonBackground || '#1d4aff', + }) + + if (inputs.useButton === 'Yes') { + shadow.appendChild(buttonElement) + } + + const CloseButtonComponent = (width: number, height: number) => ` + + + + ` + + const BetaListComponent = ` +
    +
    Enable beta features
    +
    + ${CloseButtonComponent(30, 30)} +
    +
    +
    +
    +
    +
    +
    + ` + + const betaListElement = document.createElement('div') + betaListElement.id = 'beta-list' + const listElement = Object.assign(betaListElement, { + className: 'popup', + innerHTML: BetaListComponent, + }) + + shadow.appendChild(listElement) + + if (inputs.selector) { + const clickListener = (e) => { + if (e.target.closest(inputs.selector)) { + openbugBox() + } + } + window.addEventListener('click', clickListener) + } + + const listItemComponents = (items?: PreviewItem[]) => { + if (items) { + return items + .map((item, index) => { + const checked = posthog.isFeatureEnabled(item.flagKey) + + const documentationLink = item.documentationUrl + ? ` + ` + : '' + return ` +
    +
    + ${item.name} +
    ${item.description}
    + ${documentationLink} +
    + +
    + ` + }) + .join('') + } + return '' + } +} + +function createShadow(style?: string): ShadowRoot { + const div = document.createElement('div') + const shadow = div.attachShadow({ mode: 'open' }) + if (style) { + const styleElement = Object.assign(document.createElement('style'), { + innerText: style, + }) + shadow.appendChild(styleElement) + } + document.body.appendChild(div) + return shadow +} +""".strip(), + inputs_schema=[ + { + "key": "selector", + "label": "Selector", + "description": 'CSS selector to activate on. For example: "#my-beta-button" or "[data-attr=\'posthog-early-access-features-button\']"', + "type": "string", + "default": "", + }, + { + "key": "useButton", + "label": "Show features button on the page", + "description": "If enabled, a button will be shown on the page that will open the features modal.", + "type": "choice", + "choices": [ + { + "label": "Yes", + "value": "Yes", + }, + { + "label": "No", + "value": "No", + }, + ], + "default": "No", + }, + ], +) diff --git a/posthog/cdp/templates/_siteapps/template_hogdesk.py b/posthog/cdp/templates/_siteapps/template_hogdesk.py new file mode 100644 index 0000000000000..6949f37bb4ce7 --- /dev/null +++ b/posthog/cdp/templates/_siteapps/template_hogdesk.py @@ -0,0 +1,386 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + id="template-hogdesk", + name="HogDesk", + description="HogDesk bug reporter", + icon_url="https://raw.githubusercontent.com/PostHog/bug-report-app/refs/heads/main/logo.png", + category=["Custom"], + hog=""" +const style = (inputs) => ` + .form, .button, .thanks { + position: fixed; + bottom: 20px; + right: 20px; + color: black; + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + text-align: left; + z-index: ${parseInt(inputs.zIndex) || 99999}; + } + .button { + width: 64px; + height: 64px; + border-radius: 100%; + text-align: center; + line-height: 60px; + font-size: 32px; + border: none; + cursor: pointer; + } + .button:hover { + filter: brightness(1.2); + } + .form-submit[disabled] { + opacity: 0.6; + filter: grayscale(100%); + cursor: not-allowed; + } + .thanks { + background: white; + } + .form { + display: none; + flex-direction: column; + background: white; + border: 1px solid #f0f0f0; + border-radius: 8px; + padding-top: 5px; + max-width: 380px; + box-shadow: -6px 0 16px -8px rgb(0 0 0 / 8%), -9px 0 28px 0 rgb(0 0 0 / 5%), -12px 0 48px 16px rgb(0 0 0 / 3%); + } + .form textarea { + color: #2d2d2d; + font-size: 14px; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + margin-bottom: 10px; + background: white; + color: black; + border: none; + outline: none; + padding-left: 10px; + padding-right: 10px; + padding-top: 10px; + } + .form-submit { + box-sizing: border-box; + margin: 0; + font-family: inherit; + overflow: visible; + text-transform: none; + line-height: 1.5715; + position: relative; + display: inline-block; + font-weight: 400; + white-space: nowrap; + text-align: center; + border: 1px solid transparent; + cursor: pointer; + transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); + user-select: none; + touch-action: manipulation; + height: 32px; + padding: 4px 15px; + font-size: 14px; + border-radius: 
4px; + outline: 0; + color: #fff; + border-color: #1d4aff; + background: #1d4aff; + text-shadow: 0 -1px 0 rgba(0, 0, 0, 0.12); + box-shadow: 0 2px 0 rgba(0, 0, 0, 0.045); + } + .form-submit:hover { + filter: brightness(1.2); + } + .form-cancel { + box-sizing: border-box; + margin: 0; + font-family: inherit; + overflow: visible; + text-transform: none; + line-height: 1.5715; + position: relative; + display: inline-block; + font-weight: 400; + white-space: nowrap; + text-align: center; + border: 1px solid transparent; + box-shadow: 0 2px 0 rgba(0, 0, 0, 0.015); + cursor: pointer; + transition: all 0.3s cubic-bezier(0.645, 0.045, 0.355, 1); + user-select: none; + touch-action: manipulation; + height: 32px; + padding: 4px 15px; + font-size: 14px; + border-radius: 4px; + color: #2d2d2d; + border-color: rgba(0, 0, 0, 0.15); + background: #fff; + outline: 0; + } + .thanks { + display: none; + font-size: 14px; + padding: 20px; + border: 1px solid #f0f0f0; + border-radius: 8px; + box-shadow: -6px 0 16px -8px rgb(0 0 0 / 8%), -9px 0 28px 0 rgb(0 0 0 / 5%), -12px 0 48px 16px rgb(0 0 0 / 3%); + max-width: 340px; + margin-block-end: 1em; + } + .bolded { font-weight: 600; } + .bottom-section { + border-top: 1px solid #f0f0f0; + padding: 10px 16px; + } + .buttons { + display: flex; + justify-content: space-between; + } + .specific-issue { + padding-top: 10px; + font-size: 14px; + color: #747ea1; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + } + .specific-issue a:link { + color: #5879FF; + } + .specific-issue a:visited { + color: #5879FF; + } +` + +export function onLoad({ inputs, posthog }) { + if (inputs.domains) { + const domains = inputs.domains.split(',').map((domain) => domain.trim()) + if (domains.length > 0 && domains.indexOf(window.location.hostname) === -1) { + return + } + } + const shadow = createShadow(style(inputs)) + + function openbugBox() { + Object.assign(buttonElement.style, { display: 'none' }) + Object.assign(formElement.style, { display: 'flex' }) + + const closeButton = shadow.querySelector('.form-cancel') + closeButton.addEventListener('click', (e) => { + e.preventDefault() + Object.assign(formElement.style, { display: 'none' }) + }) + } + + const buttonElement = Object.assign(document.createElement('button'), { + className: 'button', + innerText: inputs.buttonText || '?', + onclick: openbugBox, + title: inputs.buttonTitle || '', + }) + Object.assign(buttonElement.style, { + color: inputs.buttonColor || 'black', + background: inputs.buttonBackground || '#1d8db9', + }) + + if (inputs.useButton === 'Yes') { + shadow.appendChild(buttonElement) + } + + const form = ` + + +
    +
    + Close + +
    +
    +
    + ` + + const getSessionRecordingUrl = () => { + const sessionId = posthog?.sessionRecording?.sessionId + const LOOK_BACK = 30 + const recordingStartTime = Math.max( + Math.floor((new Date().getTime() - (posthog?.sessionManager?._sessionStartTimestamp || 0)) / 1000) - + LOOK_BACK, + 0 + ) + const api_host = posthog?.config?.api_host || 'https://app.posthog.com' + return sessionId ? `${api_host}/recordings/${sessionId}?t=${recordingStartTime}` : undefined + } + + const formElement = Object.assign(document.createElement('form'), { + className: 'form', + innerHTML: form, + onsubmit: function (e) { + e.preventDefault() + const sessionRecordingUrl = getSessionRecordingUrl() + posthog.capture(inputs.eventName || 'bug Sent', { + [inputs.bugProperty || '$bug']: this.bug.value, + sessionRecordingUrl: sessionRecordingUrl, + email: this.email.value + }) + Object.assign(formElement.style, { display: 'none' }) + Object.assign(thanksElement.style, { display: 'flex' }) + window.setTimeout(() => { + Object.assign(thanksElement.style, { display: 'none' }) + }, 3000) + formElement.reset() + }, + }) + const textarea = formElement.getElementsByClassName('bug-textarea')[0] as HTMLTextAreaElement + const emailInput = formElement.getElementsByClassName('bug-emailinput')[0] as HTMLInputElement + + const cancelButton = formElement.getElementsByClassName('form-cancel')[0] as HTMLElement + const submitButton = formElement.getElementsByClassName('form-submit')[0] as HTMLButtonElement + const footerArea = formElement.getElementsByClassName('specific-issue')[0] as HTMLElement + + Object.assign(submitButton.style, { + color: inputs.buttonColor || 'white', + background: inputs.buttonBackground || '#1d8db9', + borderColor: inputs.buttonBackground || '#1d8db9', + }) + + textarea.addEventListener('input', (e) => { + if (textarea.value.length > 0) { + submitButton.disabled = false + } else { + submitButton.disabled = true + } + }) + + textarea.setAttribute('placeholder', inputs.placeholderText || 'Help us improve') + cancelButton.innerText = inputs.cancelButtonText || 'Cancel' + submitButton.innerText = inputs.sendButtonText || 'Send bug' + if (inputs.footerHTML) { + footerArea.innerHTML = inputs.footerHTML + } else { + footerArea.style.display = 'none' + } + shadow.appendChild(formElement) + + if (inputs.selector) { + const clickListener = (e) => { + if (e.target.matches(inputs.selector)) { + openbugBox() + } + } + window.addEventListener('click', clickListener) + } + + console.log('Posthog - latest bug widget') + + const thanksElement = Object.assign(document.createElement('div'), { + className: 'thanks', + innerHTML: '
    ' + inputs.thanksText + '
    ' || 'Thank you!', + }) + shadow.appendChild(thanksElement) +} + +function createShadow(styleSheet: string): ShadowRoot { + const div = document.createElement('div') + const shadow = div.attachShadow({ mode: 'open' }) + if (styleSheet) { + const styleElement = Object.assign(document.createElement('style'), { + innerText: styleSheet, + }) + shadow.appendChild(styleElement) + } + document.body.appendChild(div) + return shadow +} +""".strip(), + inputs_schema=[ + { + "key": "domains", + "label": "Domains", + "description": 'Comma separated list of domains to activate on. Leave blank to enable all. For example: "localhost,app.posthog.com"', + "type": "string", + "default": "", + }, + { + "key": "selector", + "label": "Selector", + "description": 'CSS selector to activate on. For example: "#my-bug-button" or "[data-attr=\'posthog-bug-button\']"', + "type": "string", + "default": "", + }, + { + "key": "useButton", + "label": "Show bug button on the page", + "description": "Alternatively, any click on an element with the selector [data-attr='posthog-bug-button'] will open the bug widget", + "type": "choice", + "choices": [ + { + "label": "Yes", + "value": "Yes", + }, + { + "label": "No", + "value": "No", + }, + ], + "default": "Yes", + "required": False, + }, + {"key": "buttonText", "label": "Button text", "type": "string", "default": "✉️", "required": True}, + { + "key": "buttonTitle", + "label": "Button title", + "description": "The text that appears when you hover over the button", + "type": "string", + "default": "", + }, + {"key": "buttonBackground", "label": "Button background", "type": "string", "default": ""}, + {"key": "buttonColor", "label": "Button text color", "type": "string", "default": ""}, + {"key": "placeholderText", "label": "Placeholder text", "type": "string", "default": "Help us improve"}, + { + "key": "sendButtonText", + "label": "Send button text", + "type": "string", + "default": "Send bug", + "required": True, + }, + {"key": "cancelButtonText", "label": "Cancel button text", "type": "string", "default": "Cancel"}, + { + "key": "thanksText", + "label": "Thank you text", + "type": "string", + "default": "Thank you! Closing in 3 seconds...", + "required": True, + }, + { + "key": "footerHTML", + "label": "Footer HTML", + "description": "HTML to show in the footer of the bug widget. For example: \"More questions? Ask us anything\"", + "type": "string", + "default": "Have a specific issue? 
Contact support directly!", + }, + { + "key": "eventName", + "label": "bug event's event name", + "type": "string", + "default": "bug Sent", + "required": True, + }, + { + "key": "bugProperty", + "label": "bug event's bug property", + "type": "string", + "default": "$bug", + "required": True, + }, + { + "key": "zIndex", + "label": "z-index of the form and the button (default to 999999)", + "type": "string", + "default": "999999", + "required": True, + }, + ], +) diff --git a/posthog/cdp/templates/_siteapps/template_notification_bar.py b/posthog/cdp/templates/_siteapps/template_notification_bar.py new file mode 100644 index 0000000000000..c6dbb3a713ea3 --- /dev/null +++ b/posthog/cdp/templates/_siteapps/template_notification_bar.py @@ -0,0 +1,160 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + id="template-notification-bar", + name="Notification Bar", + description="Show a notification bar for your users", + icon_url="/static/hedgehog/list-hog.png", + category=["Custom", "Analytics"], + hog=""" +export function onLoad({ inputs }) { + if (inputs.domains) { + const domains = inputs.domains.split(',').map((domain) => domain.trim()) + if (domains.length > 0 && domains.indexOf(window.location.hostname) === -1) { + return + } + } + const localStorageKey = `notification-${inputs.notification}` + if (inputs.rememberClose === 'yes' && localStorage.getItem(localStorageKey)) { + return + } + const style = ` + .notification-bar-container { + min-height: 56px; + } + .notification-bar { + width: 100%; + min-height: 56px; + line-height: 36px; + font-size: 24px; + color: ${inputs.textColor || 'default'}; + background: ${inputs.backgroundColor || 'default'}; + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + text-align: center; + position: ${inputs.position === 'sticky' ? 
'fixed' : 'absolute'}; + left: 0; + top: 0; + display: flex; + align-items: center; + justify-content: center; + cursor: pointer; + z-index: 9999999; + } + .notification-bar a { + color: ${inputs.linkColor || inputs.textColor || 'default'}; + } + .notification-bar p { + margin: 0; + } + ` + const paragraph = Object.assign(document.createElement('p'), { + innerHTML: inputs.notification, + }) + const notificationElementContainer = Object.assign(document.createElement('div'), { + className: 'notification-bar-container', + }) + const notificationElement = Object.assign(document.createElement('div'), { + className: 'notification-bar', + onclick: (e) => { + if (!e.target.matches('a,button')) { + notificationElement.style.display = 'none' + notificationElementContainer.style.display = 'none' + window.localStorage.setItem(localStorageKey, 'true') + } + }, + title: inputs.buttonTitle || '', + }) + notificationElement.append(paragraph) + const shadow = createShadowRoot(style) + notificationElementContainer.appendChild(notificationElement) + shadow.appendChild(notificationElementContainer) + document.body.prepend(shadow) +} +function createShadowRoot(style) { + const div = document.createElement('div') + const shadow = div.attachShadow({ mode: 'open' }) + if (style) { + const styleElement = Object.assign(document.createElement('style'), { + innerText: style, + }) + shadow.appendChild(styleElement) + } + document.body.prepend(div) + return shadow +} +""".strip(), + inputs_schema=[ + { + "key": "domains", + "label": "Domains", + "description": 'Comma separated list of domains to activate on. Leave blank to enable all. For example: "localhost,app.posthog.com"', + "type": "string", + "default": "", + }, + { + "key": "notification", + "label": "HTML to show in the notification bar", + "type": "string", + "default": "🚀 Product 2.0! is out! Click here to learn more.", + "required": True, + }, + { + "key": "position", + "label": "Position of the notification bar", + "type": "choice", + "choices": [ + { + "label": "Sticky", + "value": "sticky", + }, + { + "label": "Top of page", + "value": "top-of-page", + }, + ], + "default": "sticky", + "required": True, + }, + { + "key": "backgroundColor", + "label": "Background color", + "type": "string", + "default": "#ebece8", + "required": True, + }, + { + "key": "textColor", + "label": "Text color", + "type": "string", + "default": "#333", + "required": True, + }, + { + "key": "linkColor", + "label": "Link color", + "type": "string", + "default": "#f64e00", + "required": True, + }, + { + "key": "rememberClose", + "label": "Remember close", + "type": "choice", + "choices": [ + { + "label": "Yes", + "value": "yes", + }, + { + "label": "No", + "value": "no", + }, + ], + "default": "yes", + "description": "Remember if the user has closed the notification bar, and don't show it again. 
This resets if you update the notification bar's text.", + }, + ], +) diff --git a/posthog/cdp/templates/_siteapps/template_pineapple_mode.py b/posthog/cdp/templates/_siteapps/template_pineapple_mode.py new file mode 100644 index 0000000000000..9f7453eb18c89 --- /dev/null +++ b/posthog/cdp/templates/_siteapps/template_pineapple_mode.py @@ -0,0 +1,168 @@ +from posthog.cdp.templates.hog_function_template import HogFunctionTemplate + +template: HogFunctionTemplate = HogFunctionTemplate( + status="client-side", + type="site_app", + id="template-pineapple-mode", + name="Pineapple Mode", + description="Make any website better by adding raining pineapples", + icon_url="/static/services/pineapple.png", + category=["Custom", "Analytics"], + hog=""" +const style = ` + .button { + position: fixed; + bottom: 20px; + right: 20px; + color: black; + font-weight: normal; + font-family: -apple-system, BlinkMacSystemFont, "Inter", "Segoe UI", "Roboto", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol"; + text-align: left; + width: 48px; + height: 48px; + border-radius: 100%; + text-align: center; + line-height: 40px; + font-size: 32px; + border: none; + cursor: pointer; + z-index: 999999; + } + .button:hover { + filter: brightness(1.2); + } + .button.disabled { + opacity: 0.5; + filter: grayscale(100%); + } +` +export function onLoad({ inputs, posthog }) { + if (inputs.domains) { + const domains = inputs.domains.split(',').map((domain) => domain.trim()) + if (domains.length > 0 && domains.indexOf(window.location.hostname) === -1) { + return + } + } + const intensity = Math.max(1, Math.min(parseInt(inputs.intensity) || 5, 10)) + const emoji = inputs.emoji || '🍍' + const shadow = createShadow(style) + let buttonElement: HTMLButtonElement + let rainInterval + function toggle(): void { + if (rainInterval) { + window.clearInterval(rainInterval) + rainInterval = undefined + posthog.capture('Pineapple mode deactivated', inputs) + buttonElement?.classList.remove('disabled') + } else { + rainInterval = window.setInterval(() => makeItRain(shadow, emoji, intensity), 1000 / intensity) + posthog.capture('Pineapple mode activated', inputs) + buttonElement?.classList.add('disabled') + } + } + if (inputs.showButton) { + buttonElement = Object.assign(document.createElement('button'), { + className: 'button', + innerText: inputs.buttonText || emoji, + onclick: toggle, + }) + Object.assign(buttonElement.style, { + color: inputs.buttonColor || 'black', + background: inputs.buttonBackground || '#ccae05', + }) + shadow.appendChild(buttonElement) + } + if (inputs.startRaining) { + for (let i = 0; i < intensity * 2; i++) { + makeItRain(shadow, emoji, intensity) + } + toggle() + } +} +// Drops an emoji from the sky +function makeItRain(shadow: ShadowRoot, emoji: string, intensity: number) { + const div = document.createElement('div') + Object.assign(div.style, { + position: 'fixed', + left: `${(window.innerWidth - 30) * Math.random()}px`, + top: '-10px', + fontSize: '24px', + zIndex: 99999999, + pointerEvents: 'none', + }) + div.innerHTML = emoji + shadow.appendChild(div) + const duration = 300 * (10 - intensity) + Math.random() * 3001 + div.animate([{ top: '-10px' }, { top: `${window.innerHeight + 20}px` }], { + duration, + iterations: 1, + }) + window.setTimeout(() => div.remove(), duration + 1) +} +function createShadow(style?: string): ShadowRoot { + const div = document.createElement('div') + const shadow = div.attachShadow({ mode: 'open' }) + if (style) { + const styleElement = 
Object.assign(document.createElement('style'), { + innerText: style, + }) + shadow.appendChild(styleElement) + } + document.body.appendChild(div) + return shadow +} +""".strip(), + inputs_schema=[ + { + "key": "domains", + "type": "string", + "label": "Domains", + "description": 'Comma separated list of domains to activate on. Leave blank to enable all. For example: "localhost,app.posthog.com"', + "default": "", + }, + { + "key": "emoji", + "type": "string", + "label": "Emoji to use", + "default": "🍍", + "required": True, + }, + { + "key": "intensity", + "type": "string", + "label": "Intensity", + "default": "4", + "required": True, + "description": "Rainfall intensity (1-10)", + }, + { + "key": "startRaining", + "type": "boolean", + "label": "Start raining immediately", + "default": True, + "required": True, + }, + { + "key": "showButton", + "type": "boolean", + "label": "Show Floating Button", + "description": "Shows a button you can use to disable the pineapple mode", + "default": True, + }, + {"key": "buttonText", "type": "string", "label": "Button text, if enabled", "default": ""}, + { + "key": "buttonColor", + "type": "string", + "label": "Button text color", + "description": 'Any valid CSS color. For example: "#ff0000" or "red"', + "default": "black", + }, + { + "key": "buttonBackground", + "type": "string", + "label": "Button background", + "description": 'Any valid CSS background. For example: "red" or "url(\'...\')"', + "default": "#ccae05", + }, + ], +) diff --git a/posthog/cdp/templates/hog_function_template.py b/posthog/cdp/templates/hog_function_template.py index f0a29fd7d2cff..c3227f9b8eb73 100644 --- a/posthog/cdp/templates/hog_function_template.py +++ b/posthog/cdp/templates/hog_function_template.py @@ -25,8 +25,19 @@ class HogFunctionSubTemplate: @dataclasses.dataclass(frozen=True) class HogFunctionTemplate: - status: Literal["alpha", "beta", "stable", "free"] - type: Literal["destination", "shared", "email", "sms", "push", "broadcast", "activity", "alert"] + status: Literal["alpha", "beta", "stable", "free", "client-side"] + type: Literal[ + "destination", + "site_destination", + "site_app", + "shared", + "email", + "sms", + "push", + "broadcast", + "activity", + "alert", + ] id: str name: str description: str diff --git a/posthog/cdp/templates/loops/template_loops.py b/posthog/cdp/templates/loops/template_loops.py index 6052eed186b36..2e65906889539 100644 --- a/posthog/cdp/templates/loops/template_loops.py +++ b/posthog/cdp/templates/loops/template_loops.py @@ -7,28 +7,46 @@ type="destination", id="template-loops", name="Loops", - description="Send events to Loops", + description="Update contacts in Loops.so", icon_url="/static/services/loops.png", category=["Email Marketing"], hog=""" -let apiKey := inputs.apiKey +if (empty(inputs.email)) { + print('No email set. Skipping...') + return +} let payload := { - 'userId': event.distinct_id, - 'eventName': event.event == '$set' ? 
'$identify' : event.event, - 'email': person.properties.email + 'email': inputs.email, + 'userId': person.id, +} + +if (inputs.include_all_properties) { + for (let key, value in person.properties) { + if (not empty(value) and not key like '$%') { + payload[key] := value + } + } } -for (let key, value in person.properties) { - payload[key] := value + +for (let key, value in inputs.properties) { + if (not empty(value)) { + payload[key] := value + } } -fetch('https://app.loops.so/api/v1/events/send', { + +let res := fetch('https://app.loops.so/api/v1/contacts/update', { 'method': 'POST', 'headers': { 'Content-Type': 'application/json', - 'Authorization': f'Bearer {apiKey}', + 'Authorization': f'Bearer {inputs.apiKey}', }, 'body': payload }) + +if (res.status >= 400) { + throw Error(f'Error from app.loops.so (status {res.status}): {res.body}') +} """.strip(), inputs_schema=[ { @@ -39,7 +57,37 @@ "default": "", "secret": True, "required": True, - } + }, + { + "key": "email", + "type": "string", + "label": "Email of the user", + "description": "Where to find the email of the user.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "include_all_properties", + "type": "boolean", + "label": "Include all properties as attributes", + "description": "If set, all person properties will be included. Individual attributes can be overridden below.", + "default": False, + "secret": False, + "required": True, + }, + { + "key": "properties", + "type": "dictionary", + "label": "Property mapping", + "description": "Map of Loops.so properties and their values. You can use the filters section to filter out unwanted events.", + "default": { + "firstName": "{person.properties.firstname}", + "lastName": "{person.properties.lastname}", + }, + "secret": False, + "required": False, + }, ], filters={ "events": [ @@ -51,6 +99,103 @@ }, ) +template_send_event: HogFunctionTemplate = HogFunctionTemplate( + status="beta", + type="destination", + id="template-loops-event", + name="Loops", + description="Send events to Loops.so", + icon_url="/static/services/loops.png", + category=["Email Marketing"], + hog=""" +if (empty(inputs.email)) { + print('No email set. 
Skipping...') + return +} + +let payload := { + 'email': inputs.email, + 'userId': person.id, + 'eventName': event.event, + 'eventProperties': {} +} + +if (inputs.include_all_properties) { + for (let key, value in event.properties) { + if (not empty(value) and not key like '$%') { + payload.eventProperties[key] := value + } + } +} + +for (let key, value in inputs.properties) { + if (not empty(value)) { + payload.eventProperties[key] := value + } +} + +let res := fetch('https://app.loops.so/api/v1/events/send', { + 'method': 'POST', + 'headers': { + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {inputs.apiKey}', + }, + 'body': payload +}) + +if (res.status >= 400) { + throw Error(f'Error from app.loops.so (status {res.status}): {res.body}') +} +""".strip(), + inputs_schema=[ + { + "key": "apiKey", + "type": "string", + "label": "Loops API Key", + "description": "Loops API Key", + "default": "", + "secret": True, + "required": True, + }, + { + "key": "email", + "type": "string", + "label": "Email of the user", + "description": "Where to find the email of the user.", + "default": "{person.properties.email}", + "secret": False, + "required": True, + }, + { + "key": "include_all_properties", + "type": "boolean", + "label": "Include all properties as attributes", + "description": "If set, all event properties will be included. Individual attributes can be overridden below.", + "default": False, + "secret": False, + "required": True, + }, + { + "key": "properties", + "type": "dictionary", + "label": "Property mapping", + "description": "Map of Loops.so properties and their values. You can use the filters section to filter out unwanted events.", + "default": { + "pathname": "{event.properties.$pathname}", + }, + "secret": False, + "required": False, + }, + ], + filters={ + "events": [ + {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, + ], + "actions": [], + "filter_test_accounts": True, + }, +) + class TemplateLoopsMigrator(HogFunctionTemplateMigrator): plugin_url = "https://github.com/PostHog/posthog-loops-plugin" diff --git a/posthog/cdp/templates/loops/test_template_loops.py b/posthog/cdp/templates/loops/test_template_loops.py index 3630909cd036d..842635cda1cac 100644 --- a/posthog/cdp/templates/loops/test_template_loops.py +++ b/posthog/cdp/templates/loops/test_template_loops.py @@ -1,13 +1,21 @@ from inline_snapshot import snapshot from posthog.cdp.templates.helpers import BaseHogFunctionTemplateTest -from posthog.cdp.templates.loops.template_loops import template as template_loops +from posthog.cdp.templates.loops.template_loops import ( + template as template_loops, + template_send_event as template_loops_send_event, +) class TestTemplateLoops(BaseHogFunctionTemplateTest): template = template_loops def _inputs(self, **kwargs): - inputs = {"apiKey": "1cac089e00a708680bdb1ed9f082d5bf"} + inputs = { + "apiKey": "1cac089e00a708680bdb1ed9f082d5bf", + "email": "max@posthog.com", + "include_all_properties": False, + "properties": {"firstName": "Max", "lastName": "AI"}, + } inputs.update(kwargs) return inputs @@ -15,16 +23,46 @@ def test_function_works(self): self.run_function( inputs=self._inputs(), globals={ - "event": {"distinct_id": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", "event": "$pageview"}, "person": { - "properties": {"email": "max@posthog.com", "name": "Max", "company": "PostHog"}, + "id": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "properties": {"name": "Max", "company": "PostHog"}, }, }, ) assert self.get_mock_fetch_calls()[0] == snapshot( ( - 
"https://app.loops.so/api/v1/events/send", + "https://app.loops.so/api/v1/contacts/update", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", + }, + "body": { + "email": "max@posthog.com", + "userId": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "firstName": "Max", + "lastName": "AI", + }, + }, + ) + ) + + def test_include_all_properties(self): + self.run_function( + inputs=self._inputs(include_all_properties=True), + globals={ + "person": { + "id": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "properties": {"company": "PostHog"}, + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://app.loops.so/api/v1/contacts/update", { "method": "POST", "headers": { @@ -32,30 +70,115 @@ def test_function_works(self): "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", }, "body": { - "userId": "66e614bd-d9f2-491e-9e2c-eeab3090f72f", - "eventName": "$pageview", "email": "max@posthog.com", - "name": "Max", + "userId": "c44562aa-c649-426a-a9d4-093fef0c2a4a", "company": "PostHog", + "firstName": "Max", + "lastName": "AI", }, }, ) ) - def test_automatic_action_mapping(self): - for event_name, expected_action in [ - ("$identify", "$identify"), - ("$set", "$identify"), - ("$pageview", "$pageview"), - ("$create_alias", "$create_alias"), - ("$autocapture", "$autocapture"), - ("custom", "custom"), - ]: - self.run_function( - inputs=self._inputs(), - globals={ - "event": {"event": event_name, "properties": {"url": "https://example.com", "$browser": "Chrome"}}, + def test_function_requires_identifier(self): + self.run_function( + inputs=self._inputs(email=""), + ) + + assert not self.get_mock_fetch_calls() + assert self.get_mock_print_calls() == snapshot([("No email set. 
Skipping...",)]) + + +class TestTemplateLoopsEvent(BaseHogFunctionTemplateTest): + template = template_loops_send_event + + def _inputs(self, **kwargs): + inputs = { + "apiKey": "1cac089e00a708680bdb1ed9f082d5bf", + "email": "max@posthog.com", + "include_all_properties": False, + "properties": {"product": "PostHog"}, + } + inputs.update(kwargs) + return inputs + + def test_function_works(self): + self.run_function( + inputs=self._inputs(), + globals={ + "person": { + "id": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "properties": {"name": "Max", "company": "PostHog"}, + }, + "event": { + "event": "pageview", + "properties": {"pathname": "/pricing"}, + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://app.loops.so/api/v1/events/send", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", + }, + "body": { + "email": "max@posthog.com", + "userId": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "eventName": "pageview", + "eventProperties": { + "product": "PostHog", + }, + }, }, ) + ) + + def test_include_all_properties(self): + self.run_function( + inputs=self._inputs(include_all_properties=True), + globals={ + "person": { + "id": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "properties": {"company": "PostHog"}, + }, + "event": { + "event": "pageview", + "properties": {"pathname": "/pricing"}, + }, + }, + ) + + assert self.get_mock_fetch_calls()[0] == snapshot( + ( + "https://app.loops.so/api/v1/events/send", + { + "method": "POST", + "headers": { + "Content-Type": "application/json", + "Authorization": "Bearer 1cac089e00a708680bdb1ed9f082d5bf", + }, + "body": { + "email": "max@posthog.com", + "userId": "c44562aa-c649-426a-a9d4-093fef0c2a4a", + "eventName": "pageview", + "eventProperties": { + "product": "PostHog", + "pathname": "/pricing", + }, + }, + }, + ) + ) + + def test_function_requires_identifier(self): + self.run_function( + inputs=self._inputs(email=""), + ) - assert self.get_mock_fetch_calls()[0][1]["body"]["eventName"] == expected_action + assert not self.get_mock_fetch_calls() + assert self.get_mock_print_calls() == snapshot([("No email set. 
Skipping...",)]) diff --git a/posthog/cdp/templates/slack/template_slack.py b/posthog/cdp/templates/slack/template_slack.py index b3079485176c6..16bb0383c1c0b 100644 --- a/posthog/cdp/templates/slack/template_slack.py +++ b/posthog/cdp/templates/slack/template_slack.py @@ -24,7 +24,7 @@ } }); -if (res.status != 200 or not res.body.ok) { +if (res.status != 200 or res.body.ok == false) { throw Error(f'Failed to post message to Slack: {res.status}: {res.body}'); } """.strip(), diff --git a/posthog/cdp/templates/test_cdp_templates.py b/posthog/cdp/templates/test_cdp_templates.py index 889f7431e33a1..4c873a9a820ec 100644 --- a/posthog/cdp/templates/test_cdp_templates.py +++ b/posthog/cdp/templates/test_cdp_templates.py @@ -1,5 +1,6 @@ from posthog.cdp.templates import HOG_FUNCTION_TEMPLATES from posthog.cdp.validation import compile_hog, validate_inputs_schema +from posthog.models.hog_functions.hog_function import TYPES_WITH_TRANSPILED_FILTERS from posthog.test.base import BaseTest @@ -9,6 +10,8 @@ def setUp(self): def test_templates_are_valid(self): for template in HOG_FUNCTION_TEMPLATES: - bytecode = compile_hog(template.hog) - assert bytecode[0] == "_H" assert validate_inputs_schema(template.inputs_schema) + + if template.type not in TYPES_WITH_TRANSPILED_FILTERS: + bytecode = compile_hog(template.hog) + assert bytecode[0] == "_H" diff --git a/posthog/cdp/test/test_filters.py b/posthog/cdp/test/test_filters.py index fecb983aa5bf0..b37c015fc16c6 100644 --- a/posthog/cdp/test/test_filters.py +++ b/posthog/cdp/test/test_filters.py @@ -140,6 +140,11 @@ def test_filters_actions(self): ] ) + # Also works if we don't pass the actions dict + expr = hog_function_filters_to_expr(filters={"actions": self.filters["actions"]}, team=self.team, actions={}) + bytecode_2 = create_bytecode(expr).bytecode + assert bytecode == bytecode_2 + def test_filters_properties(self): assert self.filters_to_bytecode(filters={"properties": self.filters["properties"]}) == snapshot( [ diff --git a/posthog/cdp/test/test_site_functions.py b/posthog/cdp/test/test_site_functions.py new file mode 100644 index 0000000000000..9852201353cdc --- /dev/null +++ b/posthog/cdp/test/test_site_functions.py @@ -0,0 +1,241 @@ +from django.test import TestCase +from posthog.cdp.site_functions import get_transpiled_function +from posthog.models.action.action import Action +from posthog.models.organization import Organization +from posthog.models.project import Project +from posthog.models.plugin import TranspilerError +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.models.user import User + + +class TestSiteFunctions(TestCase): + def setUp(self): + self.organization = Organization.objects.create(name="Test Organization") + self.user = User.objects.create_user(email="testuser@example.com", first_name="Test", password="password") + self.organization.members.add(self.user) + self.project, self.team = Project.objects.create_with_team( + initiating_user=self.user, + organization=self.organization, + name="Test project", + ) + + def test_get_transpiled_function_basic(self): + id = "123" + source = 'export function onLoad() { console.log("Hello, World!"); }' + filters: dict = {} + inputs: dict = {} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn('console.log("Hello, World!")', result) + self.assertIn(f"window['__$$ph_site_app_{id}_posthog']", result) + + def test_get_transpiled_function_with_static_input(self): + id = "123" 
+ source = "export function onLoad() { console.log(inputs.message); }" + filters: dict = {} + inputs = {"message": {"value": "Hello, Inputs!"}} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.message);", result) + self.assertIn("inputs = {", result) + self.assertIn('"message": "Hello, Inputs!"', result) + + def test_get_transpiled_function_with_template_input(self): + id = "123" + source = "export function onLoad() { console.log(inputs.greeting); }" + filters: dict = {} + inputs = {"greeting": {"value": "Hello, {person.properties.name}!"}} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.greeting);", result) + # Check that the input processing code is included + self.assertIn("function getInputsKey", result) + self.assertIn('inputs["greeting"] = getInputsKey("greeting");', result) + self.assertIn('case "greeting": return ', result) + self.assertIn('__getGlobal("person")', result) + + def test_get_transpiled_function_with_filters(self): + id = "123" + source = "export function onEvent(event) { console.log(event.event); }" + filters: dict = {"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}]} + inputs: dict = {} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(event.event);", result) + self.assertIn("const filterMatches = ", result) + self.assertIn('__getGlobal("event") == "$pageview"', result) + self.assertIn("if (filterMatches) { response.onEvent({", result) + + def test_get_transpiled_function_with_invalid_template_input(self): + id = "123" + source = "export function onLoad() { console.log(inputs.greeting); }" + filters: dict = {} + inputs = {"greeting": {"value": "Hello, {person.properties.nonexistent_property}!"}} + team = self.team + + # This should not raise an exception during transpilation + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.greeting);", result) + + def test_get_transpiled_function_with_syntax_error_in_source(self): + id = "123" + source = 'export function onLoad() { console.log("Missing closing brace");' + filters: dict = {} + inputs: dict = {} + team = self.team + + with self.assertRaises(TranspilerError): + get_transpiled_function(id, source, filters, inputs, team) + + def test_get_transpiled_function_with_complex_inputs(self): + id = "123" + source = "export function onLoad() { console.log(inputs.complexInput); }" + filters: dict = {} + inputs = { + "complexInput": { + "value": { + "nested": "{event.properties.url}", + "list": ["{person.properties.name}", "{groups.group_name}"], + } + } + } + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.complexInput);", result) + self.assertIn("function getInputsKey", result) + self.assertIn('inputs["complexInput"] = getInputsKey("complexInput");', result) + + def test_get_transpiled_function_with_empty_inputs(self): + id = "123" + source = 'export function onLoad() { console.log("No inputs"); }' + filters: dict = {} + inputs: dict = {} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + 
self.assertIsInstance(result, str) + self.assertIn('console.log("No inputs");', result) + self.assertIn("let inputs = {\n};", result) + + def test_get_transpiled_function_with_non_template_string(self): + id = "123" + source = "export function onLoad() { console.log(inputs.staticMessage); }" + filters: dict = {} + inputs = {"staticMessage": {"value": "This is a static message."}} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.staticMessage);", result) + # Since the value does not contain '{', it should be added directly to inputs object + self.assertIn('"staticMessage": "This is a static message."', result) + self.assertNotIn("function getInputsKey", result) + + def test_get_transpiled_function_with_list_inputs(self): + id = "123" + source = "export function onLoad() { console.log(inputs.messages); }" + filters: dict = {} + inputs = {"messages": {"value": ["Hello", "World", "{person.properties.name}"]}} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.messages);", result) + self.assertIn("function getInputsKey", result) + self.assertIn('inputs["messages"] = getInputsKey("messages");', result) + + def test_get_transpiled_function_with_event_filter(self): + id = "123" + source = "export function onEvent(event) { console.log(event.properties.url); }" + filters: dict = { + "events": [{"id": "$pageview", "name": "$pageview", "type": "events"}], + "filter_test_accounts": True, + } + inputs: dict = {} + team = self.team + # Assume that team.test_account_filters is set up + team.test_account_filters = [{"key": "email", "value": "@test.com", "operator": "icontains", "type": "person"}] + team.save() + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(event.properties.url);", result) + self.assertIn("const filterMatches = ", result) + self.assertIn('__getGlobal("event") == "$pageview"', result) + self.assertIn( + '(ilike(__getProperty(__getProperty(__getGlobal("person"), "properties", true), "email", true), "%@test.com%")', + result, + ) + + def test_get_transpiled_function_with_groups(self): + id = "123" + source = "export function onLoad() { console.log(inputs.groupInfo); }" + filters: dict = {} + inputs = {"groupInfo": {"value": "{groups['company']}"}} + team = self.team + + # Set up group type mapping + GroupTypeMapping.objects.create(team=team, group_type="company", group_type_index=0, project=self.project) + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.groupInfo);", result) + self.assertIn('inputs["groupInfo"] = getInputsKey("groupInfo");', result) + self.assertIn('__getProperty(__getGlobal("groups"), "company", false)', result) + + def test_get_transpiled_function_with_missing_group(self): + id = "123" + source = "export function onLoad() { console.log(inputs.groupInfo); }" + filters: dict = {} + inputs = {"groupInfo": {"value": "{groups['nonexistent']}"}} + team = self.team + + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(inputs.groupInfo);", result) + self.assertIn('inputs["groupInfo"] = getInputsKey("groupInfo");', result) + self.assertIn('__getProperty(__getGlobal("groups"), 
"nonexistent"', result) + + def test_get_transpiled_function_with_complex_filters(self): + action = Action.objects.create(team=self.team, name="Test Action") + action.steps = [{"event": "$pageview", "url": "https://example.com"}] # type: ignore + action.save() + id = "123" + source = "export function onEvent(event) { console.log(event.event); }" + filters: dict = { + "events": [{"id": "$pageview", "name": "$pageview", "type": "events"}], + "actions": [{"id": str(action.pk), "name": "Test Action", "type": "actions"}], + "filter_test_accounts": True, + } + inputs: dict = {} + team = self.team + result = get_transpiled_function(id, source, filters, inputs, team) + + self.assertIsInstance(result, str) + self.assertIn("console.log(event.event);", result) + self.assertIn("const filterMatches = ", result) + self.assertIn('__getGlobal("event") == "$pageview"', result) + self.assertIn("https://example.com", result) diff --git a/posthog/cdp/validation.py b/posthog/cdp/validation.py index 0b2bbe2237dd6..9c71c894e93db 100644 --- a/posthog/cdp/validation.py +++ b/posthog/cdp/validation.py @@ -1,9 +1,12 @@ +import json import logging from typing import Any, Optional from rest_framework import serializers from posthog.hogql.compiler.bytecode import create_bytecode +from posthog.hogql.compiler.javascript import JavaScriptCompiler from posthog.hogql.parser import parse_program, parse_string_template +from posthog.models.hog_functions.hog_function import TYPES_WITH_JAVASCRIPT_SOURCE logger = logging.getLogger(__name__) @@ -23,6 +26,31 @@ def generate_template_bytecode(obj: Any) -> Any: return obj +def transpile_template_code(obj: Any, compiler: JavaScriptCompiler) -> str: + """ + Clones an object, compiling any string values to bytecode templates + """ + if isinstance(obj, dict): + return ( + "{" + + ( + ", ".join( + [ + f"{json.dumps(str(key))}: {transpile_template_code(value, compiler)}" + for key, value in obj.items() + ] + ) + ) + + "}" + ) + elif isinstance(obj, list): + return "[" + (", ".join([transpile_template_code(item, compiler) for item in obj])) + "]" + elif isinstance(obj, str): + return compiler.visit(parse_string_template(obj)) + else: + return json.dumps(obj) + + class InputsSchemaItemSerializer(serializers.Serializer): type = serializers.ChoiceField( choices=["string", "boolean", "dictionary", "choice", "json", "integration", "integration_field", "email"] @@ -55,6 +83,7 @@ class InputsItemSerializer(serializers.Serializer): def validate(self, attrs): schema = self.context["schema"] + function_type = self.context["function_type"] value = attrs.get("value") name: str = schema["key"] @@ -96,7 +125,16 @@ def validate(self, attrs): # We want to exclude the "design" property value = {key: value[key] for key in value if key != "design"} - attrs["bytecode"] = generate_template_bytecode(value) + if function_type in TYPES_WITH_JAVASCRIPT_SOURCE: + compiler = JavaScriptCompiler() + code = transpile_template_code(value, compiler) + attrs["transpiled"] = {"lang": "ts", "code": code, "stl": list(compiler.stl_functions)} + if "bytecode" in attrs: + del attrs["bytecode"] + else: + attrs["bytecode"] = generate_template_bytecode(value) + if "transpiled" in attrs: + del attrs["transpiled"] except Exception as e: raise serializers.ValidationError({"inputs": {name: f"Invalid template: {str(e)}"}}) @@ -115,7 +153,12 @@ def validate_inputs_schema(value: list) -> list: return serializer.validated_data or [] -def validate_inputs(inputs_schema: list, inputs: dict, existing_secret_inputs: Optional[dict] = None) -> 
dict: +def validate_inputs( + inputs_schema: list, + inputs: dict, + existing_secret_inputs: Optional[dict] = None, + function_type: Optional[str] = None, +) -> dict: """ Tricky: We want to allow overriding the secret inputs, but not return them. If we have a given input then we use it, otherwise we pull it from the existing secrets @@ -129,7 +172,9 @@ def validate_inputs(inputs_schema: list, inputs: dict, existing_secret_inputs: O if schema.get("secret") and existing_secret_inputs and value and value.get("secret"): value = existing_secret_inputs.get(schema["key"]) or {} - serializer = InputsItemSerializer(data=value, context={"schema": schema}) + serializer = InputsItemSerializer( + data=value, context={"schema": schema, "function_type": function_type or "destination"} + ) if not serializer.is_valid(): raise serializers.ValidationError(serializer.errors) diff --git a/posthog/clickhouse/cluster.py b/posthog/clickhouse/cluster.py new file mode 100644 index 0000000000000..3aa67c94ff3b5 --- /dev/null +++ b/posthog/clickhouse/cluster.py @@ -0,0 +1,125 @@ +from __future__ import annotations + +import logging +from collections.abc import Callable, Iterator, Sequence +from concurrent.futures import ALL_COMPLETED, FIRST_EXCEPTION, Future, ThreadPoolExecutor, as_completed +from typing import Literal, NamedTuple, TypeVar + +from clickhouse_driver import Client +from clickhouse_pool import ChPool +from django.conf import settings + +from posthog.clickhouse.client.connection import make_ch_pool + + +logger = logging.getLogger(__name__) + + +K = TypeVar("K") +V = TypeVar("V") + + +class FuturesMap(dict[K, Future[V]]): + def as_completed(self, timeout: float | int | None = None) -> Iterator[tuple[K, Future[V]]]: + reverse_map = {v: k for k, v in self.items()} + assert len(reverse_map) == len(self) + + for f in as_completed(self.values(), timeout=timeout): + yield reverse_map[f], f + + def result( + self, + timeout: float | int | None = None, + return_when: Literal["FIRST_EXCEPTION", "ALL_COMPLETED"] = ALL_COMPLETED, + ) -> dict[K, V]: + results = {} + errors = {} + for k, future in self.as_completed(timeout=timeout): + try: + results[k] = future.result() + except Exception as e: + if return_when is FIRST_EXCEPTION: + raise + else: + errors[k] = e + + if errors: + # TODO: messaging could be improved here + raise ExceptionGroup("not all futures returned a result", [*errors.values()]) + + return results + + +class ConnectionInfo(NamedTuple): + address: str + port: int + + +class HostInfo(NamedTuple): + connection_info: ConnectionInfo + shard_num: int | None + replica_num: int | None + + +T = TypeVar("T") + + +class ClickhouseCluster: + def __init__(self, bootstrap_client: Client, extra_hosts: Sequence[ConnectionInfo] | None = None) -> None: + self.__hosts = [ + HostInfo(ConnectionInfo(host_address, port), shard_num, replica_num) + for (host_address, port, shard_num, replica_num) in bootstrap_client.execute( + """ + SELECT host_address, port, shard_num, replica_num + FROM system.clusters + WHERE name = %(name)s + ORDER BY shard_num, replica_num + """, + {"name": settings.CLICKHOUSE_CLUSTER}, + ) + ] + if extra_hosts is not None: + self.__hosts.extend( + [HostInfo(connection_info, shard_num=None, replica_num=None) for connection_info in extra_hosts] + ) + self.__pools: dict[HostInfo, ChPool] = {} + + def __get_task_function(self, host: HostInfo, fn: Callable[[Client], T]) -> Callable[[], T]: + pool = self.__pools.get(host) + if pool is None: + pool = self.__pools[host] = 
make_ch_pool(host=host.connection_info.address, port=host.connection_info.port) + + def task(): + with pool.get_client() as client: + logger.debug("Executing %r on %r...", fn, host) + try: + result = fn(client) + except Exception: + logger.exception("Failed to execute %r on %r!", fn, host) + raise + else: + logger.debug("Successfully executed %r on %r.", fn, host) + return result + + return task + + def map_all_hosts(self, fn: Callable[[Client], T]) -> FuturesMap[HostInfo, T]: + """ + Execute the callable once for each host in the cluster. + """ + with ThreadPoolExecutor() as executor: + return FuturesMap({host: executor.submit(self.__get_task_function(host, fn)) for host in self.__hosts}) + + def map_one_host_per_shard(self, fn: Callable[[Client], T]) -> FuturesMap[HostInfo, T]: + """ + Execute the callable once for each shard in the cluster. + """ + shard_hosts: dict[int, HostInfo] = {} + for host in self.__hosts: + if host.shard_num is not None and host.shard_num not in shard_hosts: + shard_hosts[host.shard_num] = host + + with ThreadPoolExecutor() as executor: + return FuturesMap( + {host: executor.submit(self.__get_task_function(host, fn)) for host in shard_hosts.values()} + ) diff --git a/posthog/hogql/compiler/javascript.py b/posthog/hogql/compiler/javascript.py index 6d27567fa11a4..a70b9eeb54a1f 100644 --- a/posthog/hogql/compiler/javascript.py +++ b/posthog/hogql/compiler/javascript.py @@ -75,12 +75,14 @@ class Local: def to_js_program(code: str) -> str: compiler = JavaScriptCompiler() code = compiler.visit(parse_program(code)) - imports = compiler.get_inlined_stl() + imports = compiler.get_stl_code() return imports + ("\n\n" if imports else "") + code -def to_js_expr(expr: str) -> str: - return JavaScriptCompiler().visit(parse_expr(expr)) +def to_js_expr(expr: str | ast.Expr) -> str: + if isinstance(expr, str): + expr = parse_expr(expr) + return JavaScriptCompiler().visit(expr) def _as_block(node: ast.Statement) -> ast.Block: @@ -113,14 +115,14 @@ def __init__( self.scope_depth = 0 self.args = args or [] self.indent_level = 0 - self.inlined_stl: set[str] = set() + self.stl_functions: set[str] = set() # Initialize locals with function arguments for arg in self.args: self._declare_local(arg) - def get_inlined_stl(self) -> str: - return import_stl_functions(self.inlined_stl) + def get_stl_code(self) -> str: + return import_stl_functions(self.stl_functions) def _start_scope(self): self.scope_depth += 1 @@ -172,28 +174,28 @@ def visit_compare_operation(self, node: ast.CompareOperation): elif op == ast.CompareOperationOp.NotIn: return f"(!{right_code}.includes({left_code}))" elif op == ast.CompareOperationOp.Like: - self.inlined_stl.add("like") + self.stl_functions.add("like") return f"like({left_code}, {right_code})" elif op == ast.CompareOperationOp.ILike: - self.inlined_stl.add("ilike") + self.stl_functions.add("ilike") return f"ilike({left_code}, {right_code})" elif op == ast.CompareOperationOp.NotLike: - self.inlined_stl.add("like") + self.stl_functions.add("like") return f"!like({left_code}, {right_code})" elif op == ast.CompareOperationOp.NotILike: - self.inlined_stl.add("ilike") + self.stl_functions.add("ilike") return f"!ilike({left_code}, {right_code})" elif op == ast.CompareOperationOp.Regex: - self.inlined_stl.add("match") + self.stl_functions.add("match") return f"match({left_code}, {right_code})" elif op == ast.CompareOperationOp.IRegex: - self.inlined_stl.add("__imatch") + self.stl_functions.add("__imatch") return f"__imatch({left_code}, {right_code})" elif op == 
ast.CompareOperationOp.NotRegex: - self.inlined_stl.add("match") + self.stl_functions.add("match") return f"!match({left_code}, {right_code})" elif op == ast.CompareOperationOp.NotIRegex: - self.inlined_stl.add("__imatch") + self.stl_functions.add("__imatch") return f"!__imatch({left_code}, {right_code})" elif op == ast.CompareOperationOp.InCohort or op == ast.CompareOperationOp.NotInCohort: cohort_name = "" @@ -229,14 +231,14 @@ def visit_field(self, node: ast.Field): if found_local: array_code = _sanitize_identifier(element) elif element in STL_FUNCTIONS: - self.inlined_stl.add(str(element)) + self.stl_functions.add(str(element)) array_code = f"{_sanitize_identifier(element)}" else: array_code = f"{_JS_GET_GLOBAL}({json.dumps(element)})" continue if (isinstance(element, int) and not isinstance(element, bool)) or isinstance(element, str): - self.inlined_stl.add("__getProperty") + self.stl_functions.add("__getProperty") array_code = f"__getProperty({array_code}, {json.dumps(element)}, true)" else: raise QueryError(f"Unsupported element: {element} ({type(element)})") @@ -245,13 +247,13 @@ def visit_field(self, node: ast.Field): def visit_tuple_access(self, node: ast.TupleAccess): tuple_code = self.visit(node.tuple) index_code = str(node.index) - self.inlined_stl.add("__getProperty") + self.stl_functions.add("__getProperty") return f"__getProperty({tuple_code}, {index_code}, {json.dumps(node.nullish)})" def visit_array_access(self, node: ast.ArrayAccess): array_code = self.visit(node.array) property_code = self.visit(node.property) - self.inlined_stl.add("__getProperty") + self.stl_functions.add("__getProperty") return f"__getProperty({array_code}, {property_code}, {json.dumps(node.nullish)})" def visit_constant(self, node: ast.Constant): @@ -307,7 +309,7 @@ def build_nested_if(args): return f"({expr_code} ?? 
{if_null_code})" if node.name in STL_FUNCTIONS: - self.inlined_stl.add(node.name) + self.stl_functions.add(node.name) name = _sanitize_identifier(node.name) args_code = ", ".join(self.visit(arg) for arg in node.args) return f"{name}({args_code})" @@ -422,7 +424,7 @@ def visit_for_in_statement(self, node: ast.ForInStatement): self._declare_local(node.keyVar) self._declare_local(node.valueVar) body_code = self.visit(_as_block(node.body)) - self.inlined_stl.add("keys") + self.stl_functions.add("keys") resp = f"for (let {_sanitize_identifier(node.keyVar)} of keys({expr_code})) {{ let {_sanitize_identifier(node.valueVar)} = {expr_code}[{_sanitize_identifier(node.keyVar)}]; {body_code} }}" self._end_scope() return resp @@ -430,7 +432,7 @@ def visit_for_in_statement(self, node: ast.ForInStatement): self._start_scope() self._declare_local(node.valueVar) body_code = self.visit(_as_block(node.body)) - self.inlined_stl.add("values") + self.stl_functions.add("values") resp = f"for (let {_sanitize_identifier(node.valueVar)} of values({expr_code})) {body_code}" self._end_scope() return resp @@ -450,14 +452,14 @@ def visit_variable_assignment(self, node: ast.VariableAssignment): tuple_code = self.visit(node.left.tuple) index = node.left.index right_code = self.visit(node.right) - self.inlined_stl.add("__setProperty") + self.stl_functions.add("__setProperty") return f"__setProperty({tuple_code}, {index}, {right_code});" elif isinstance(node.left, ast.ArrayAccess): array_code = self.visit(node.left.array) property_code = self.visit(node.left.property) right_code = self.visit(node.right) - self.inlined_stl.add("__setProperty") + self.stl_functions.add("__setProperty") return f"__setProperty({array_code}, {property_code}, {right_code});" elif isinstance(node.left, ast.Field): @@ -475,10 +477,10 @@ def visit_variable_assignment(self, node: ast.VariableAssignment): elif (isinstance(element, int) and not isinstance(element, bool)) or isinstance(element, str): if index == len(chain) - 1: right_code = self.visit(node.right) - self.inlined_stl.add("__setProperty") + self.stl_functions.add("__setProperty") array_code = f"__setProperty({array_code}, {json.dumps(element)}, {right_code})" else: - self.inlined_stl.add("__getProperty") + self.stl_functions.add("__getProperty") array_code = f"__getProperty({array_code}, {json.dumps(element)}, true)" else: raise QueryError(f"Unsupported element: {element} ({type(element)})") @@ -518,7 +520,7 @@ def visit_lambda(self, node: ast.Lambda): else: expr_code = self.visit(node.expr) self._end_scope() - self.inlined_stl.add("__lambda") + self.stl_functions.add("__lambda") # we wrap it in __lambda() to make the function anonymous (a true lambda without a name) return f"__lambda(({params_code}) => {expr_code})" @@ -539,7 +541,7 @@ def visit_array(self, node: ast.Array): def visit_tuple(self, node: ast.Tuple): items_code = ", ".join([self.visit(expr) for expr in node.exprs]) - self.inlined_stl.add("tuple") + self.stl_functions.add("tuple") return f"tuple({items_code})" def visit_hogqlx_tag(self, node: ast.HogQLXTag): diff --git a/posthog/hogql/compiler/test/test_javascript.py b/posthog/hogql/compiler/test/test_javascript.py index cf577ff25d41b..c23707701c4ff 100644 --- a/posthog/hogql/compiler/test/test_javascript.py +++ b/posthog/hogql/compiler/test/test_javascript.py @@ -113,10 +113,10 @@ def test_visit_lambda(self): code = to_js_expr("x -> x + 1") self.assertTrue(code.startswith("__lambda((x) => (x + 1))")) - def test_inlined_stl(self): + def test_stl_code(self): compiler = 
JavaScriptCompiler() - compiler.inlined_stl.add("concat") - stl_code = compiler.get_inlined_stl() + compiler.stl_functions.add("concat") + stl_code = compiler.get_stl_code() self.assertIn("function concat", stl_code) def test_sanitize_keywords(self): diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index 5e64111632997..01b09360a413f 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -1,5 +1,6 @@ from datetime import date, datetime from enum import StrEnum +import sys from typing import Optional, Literal, TypeAlias from uuid import UUID from pydantic import ConfigDict, BaseModel @@ -53,6 +54,7 @@ class LimitContext(StrEnum): EXPORT = "export" COHORT_CALCULATION = "cohort_calculation" HEATMAPS = "heatmaps" + SAVED_QUERY = "saved_query" def get_max_limit_for_context(limit_context: LimitContext) -> int: @@ -62,6 +64,8 @@ def get_max_limit_for_context(limit_context: LimitContext) -> int: return MAX_SELECT_HEATMAPS_LIMIT # 1M elif limit_context == LimitContext.COHORT_CALCULATION: return MAX_SELECT_COHORT_CALCULATION_LIMIT # 1b + elif limit_context == LimitContext.SAVED_QUERY: + return sys.maxsize # Max python int else: raise ValueError(f"Unexpected LimitContext value: {limit_context}") @@ -76,6 +80,8 @@ def get_default_limit_for_context(limit_context: LimitContext) -> int: return MAX_SELECT_HEATMAPS_LIMIT # 1M elif limit_context == LimitContext.COHORT_CALCULATION: return MAX_SELECT_COHORT_CALCULATION_LIMIT # 1b + elif limit_context == LimitContext.SAVED_QUERY: + return sys.maxsize # Max python int else: raise ValueError(f"Unexpected LimitContext value: {limit_context}") @@ -94,6 +100,7 @@ class HogQLQuerySettings(BaseModel): optimize_aggregation_in_order: Optional[bool] = None date_time_output_format: Optional[str] = None date_time_input_format: Optional[str] = None + join_algorithm: Optional[str] = None # Settings applied on top of all HogQL queries. 
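As a rough sketch of how the new join_algorithm field on HogQLQuerySettings (added just above in posthog/hogql/constants.py) is meant to be used: the funnel_udf.py and funnel_trends_udf.py changes later in this patch attach it to the parsed funnel query, and the printer then emits it as the SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge' clause visible in the updated snapshots. The helper below is illustrative only, not part of the patch; all names come from this PR, and it assumes parse_select returns a plain ast.SelectQuery for a simple SELECT.

# Illustrative sketch (not part of the diff): attach the per-query join algorithm
# the same way FunnelUDF.get_query() now does after this patch.
from posthog.hogql import ast
from posthog.hogql.constants import HogQLQuerySettings
from posthog.hogql.parser import parse_select

# Same algorithm list the funnel base module defines as JOIN_ALGOS in this patch.
JOIN_ALGOS = "direct,parallel_hash,hash,full_sorting_merge"

def with_join_algorithm(hogql: str) -> ast.SelectQuery:
    query = parse_select(hogql)
    # Assumes a plain SELECT here; set queries (UNIONs) would need separate handling.
    assert isinstance(query, ast.SelectQuery)
    # Printed as: ... SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge'
    query.settings = HogQLQuerySettings(join_algorithm=JOIN_ALGOS)
    return query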
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 9a990b518a7d3..94f9e1729ac41 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -409,7 +409,9 @@ def define_mappings(warehouse: dict[str, Table], get_table: Callable): from_field=from_field, to_field=to_field, join_table=joining_table, - join_function=join.join_function(), + join_function=join.join_function_for_experiments() + if "events" == join.joining_table_name and join.configuration.get("experiments_optimized") + else join.join_function(), ) if join.source_table_name == "persons": diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 9eb0980b9d933..d0f4755c67bad 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -1007,6 +1007,7 @@ def compare_types(arg_types: list[ConstantType], sig_arg_types: tuple[ConstantTy "argMaxMerge": HogQLFunctionMeta("argMaxMerge", 1, 1, aggregate=True), "avgState": HogQLFunctionMeta("avgState", 1, 1, aggregate=True), "avgMerge": HogQLFunctionMeta("avgMerge", 1, 1, aggregate=True), + "avgMergeIf": HogQLFunctionMeta("avgMergeIf", 2, 2, aggregate=True), "avgWeighted": HogQLFunctionMeta("avgWeighted", 2, 2, aggregate=True), "avgWeightedIf": HogQLFunctionMeta("avgWeightedIf", 3, 3, aggregate=True), "avgArray": HogQLFunctionMeta("avgArrayOrNull", 1, 1, aggregate=True), diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index dee9988d97c0c..37fea932f2014 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -23,7 +23,7 @@ ) from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import Table, FunctionCallTable, SavedQuery -from posthog.hogql.database.database import Database, create_hogql_database +from posthog.hogql.database.database import create_hogql_database from posthog.hogql.database.s3_table import S3Table from posthog.hogql.errors import ImpossibleASTError, InternalHogQLError, QueryError, ResolutionError from posthog.hogql.escape_sql import ( @@ -66,9 +66,7 @@ def team_id_guard_for_table(table_type: Union[ast.TableType, ast.TableAliasType] ) -def to_printed_hogql( - query: ast.Expr, team: Team, modifiers: Optional[HogQLQueryModifiers] = None, database: Optional["Database"] = None -) -> str: +def to_printed_hogql(query: ast.Expr, team: Team, modifiers: Optional[HogQLQueryModifiers] = None) -> str: """Prints the HogQL query without mutating the node""" return print_ast( clone_expr(query), @@ -77,7 +75,6 @@ def to_printed_hogql( team_id=team.pk, enable_select_queries=True, modifiers=create_default_modifiers_for_team(team, modifiers), - database=database, ), pretty=True, ) diff --git a/posthog/hogql_queries/error_tracking_query_runner.py b/posthog/hogql_queries/error_tracking_query_runner.py index 20eebdeb1ac2c..c6c2231d366b1 100644 --- a/posthog/hogql_queries/error_tracking_query_runner.py +++ b/posthog/hogql_queries/error_tracking_query_runner.py @@ -29,6 +29,7 @@ def __init__(self, *args, **kwargs): self.paginator = HogQLHasMorePaginator.from_limit_context( limit_context=LimitContext.QUERY, limit=self.query.limit if self.query.limit else None, + offset=self.query.offset, ) def to_query(self) -> ast.SelectQuery: @@ -184,11 +185,11 @@ def order_by(self): return ( [ ast.OrderExpr( - expr=ast.Field(chain=[self.query.order]), - order="ASC" if self.query.order == "first_seen" else "DESC", + expr=ast.Field(chain=[self.query.orderBy]), + order="ASC" if self.query.orderBy == "first_seen" else "DESC", ) 
] - if self.query.order + if self.query.orderBy else None ) diff --git a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py index f5cabcbe74ecc..69e12f49bbd13 100644 --- a/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_funnels_query_runner.py @@ -55,16 +55,19 @@ def calculate(self) -> ExperimentFunnelsQueryResponse: self._validate_event_variants(funnels_result) - # Filter results to only include valid variants in the first step - funnels_result.results = [ - result for result in funnels_result.results if result[0]["breakdown_value"][0] in self.variants - ] - - # Statistical analysis - control_variant, test_variants = self._get_variants_with_base_stats(funnels_result) - probabilities = calculate_probabilities(control_variant, test_variants) - significance_code, loss = are_results_significant(control_variant, test_variants, probabilities) - credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) + try: + # Filter results to only include valid variants in the first step + funnels_result.results = [ + result for result in funnels_result.results if result[0]["breakdown_value"][0] in self.variants + ] + + # Statistical analysis + control_variant, test_variants = self._get_variants_with_base_stats(funnels_result) + probabilities = calculate_probabilities(control_variant, test_variants) + significance_code, loss = are_results_significant(control_variant, test_variants, probabilities) + credible_intervals = calculate_credible_intervals([control_variant, *test_variants]) + except Exception as e: + raise ValueError(f"Error calculating experiment funnel results: {str(e)}") from e return ExperimentFunnelsQueryResponse( kind="ExperimentFunnelsQuery", diff --git a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py index 5f5a93a84cbdb..06619c4dfeee1 100644 --- a/posthog/hogql_queries/experiments/experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/experiment_trends_query_runner.py @@ -3,9 +3,6 @@ from django.conf import settings from posthog.constants import ExperimentNoResultsErrorKeys from posthog.hogql import ast -from posthog.hogql.context import HogQLContext -from posthog.hogql.database.database import create_hogql_database -from posthog.hogql.database.models import LazyJoin from posthog.hogql_queries.experiments import CONTROL_VARIANT_KEY from posthog.hogql_queries.experiments.trends_statistics import ( are_results_significant, @@ -37,7 +34,7 @@ TrendsQuery, TrendsQueryResponse, ) -from typing import Any, Optional, cast +from typing import Any, Optional import threading @@ -255,86 +252,7 @@ def calculate(self) -> ExperimentTrendsQueryResponse: def run(query_runner: TrendsQueryRunner, result_key: str, is_parallel: bool): try: - # Create a new database instance where we can attach our - # custom join to the events table. It will be passed through - # and used by the query runner. 
- database = create_hogql_database(team_id=self.team.pk) - if self._is_data_warehouse_query(query_runner.query): - series_node = cast(DataWarehouseNode, query_runner.query.series[0]) - table = database.get_table(series_node.table_name) - table.fields["events"] = LazyJoin( - from_field=[series_node.distinct_id_field], - join_table=database.get_table("events"), - join_function=lambda join_to_add, context, node: ( - ast.JoinExpr( - table=ast.SelectQuery( - select=[ - ast.Alias(alias=name, expr=ast.Field(chain=["events", *chain])) - for name, chain in { - **join_to_add.fields_accessed, - "timestamp": ["timestamp"], - "distinct_id": ["distinct_id"], - "properties": ["properties"], - }.items() - ], - select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), - ), - # ASOF JOIN finds the most recent matching event that occurred at or before each data warehouse timestamp. - # - # Why this matters: - # When a user performs an action (recorded in data warehouse), we want to know which - # experiment variant they were assigned at that moment. The most recent $feature_flag_called - # event before their action represents their active variant assignment. - # - # Example: - # Data Warehouse: timestamp=2024-01-03 12:00, distinct_id=user1 - # Events: - # 2024-01-02: (user1, variant='control') <- This event will be joined - # 2024-01-03: (user1, variant='test') <- Ignored (occurs after data warehouse timestamp) - # - # This ensures we capture the correct causal relationship: which experiment variant - # was the user assigned to when they performed the action? - join_type="ASOF LEFT JOIN", - alias=join_to_add.to_table, - constraint=ast.JoinConstraint( - expr=ast.And( - exprs=[ - ast.CompareOperation( - left=ast.Field(chain=[join_to_add.to_table, "event"]), - op=ast.CompareOperationOp.Eq, - right=ast.Constant(value="$feature_flag_called"), - ), - ast.CompareOperation( - left=ast.Field( - chain=[ - join_to_add.from_table, - series_node.distinct_id_field, - ] - ), - op=ast.CompareOperationOp.Eq, - right=ast.Field(chain=[join_to_add.to_table, "distinct_id"]), - ), - ast.CompareOperation( - left=ast.Field( - chain=[ - join_to_add.from_table, - series_node.timestamp_field, - ] - ), - op=ast.CompareOperationOp.GtEq, - right=ast.Field(chain=[join_to_add.to_table, "timestamp"]), - ), - ] - ), - constraint_type="ON", - ), - ) - ), - ) - - context = HogQLContext(team_id=self.team.pk, database=database) - - result = query_runner.calculate(context=context) + result = query_runner.calculate() shared_results[result_key] = result except Exception as e: errors.append(e) diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py index 5645566a954aa..8837bfeab8607 100644 --- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py +++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py @@ -1,5 +1,4 @@ from django.test import override_settings -from posthog.hogql.errors import QueryError from posthog.hogql_queries.experiments.experiment_trends_query_runner import ExperimentTrendsQueryRunner from posthog.models.experiment import Experiment, ExperimentHoldout from posthog.models.feature_flag.feature_flag import FeatureFlag @@ -34,6 +33,7 @@ from boto3 import resource from botocore.config import Config from posthog.warehouse.models.credential import DataWarehouseCredential +from posthog.warehouse.models.join import DataWarehouseJoin from posthog.warehouse.models.table 
import DataWarehouseTable TEST_BUCKET = "test_storage_bucket-posthog.hogql.datawarehouse.trendquery" + XDIST_SUFFIX @@ -137,7 +137,7 @@ def create_data_warehouse_table_with_payments(self): ) distinct_id = pa.array(["user_control_0", "user_test_1", "user_test_2", "user_test_3", "user_extra"]) amount = pa.array([100, 50, 75, 80, 90]) - names = ["id", "timestamp", "distinct_id", "amount"] + names = ["id", "dw_timestamp", "dw_distinct_id", "amount"] pq.write_to_dataset( pa.Table.from_arrays([id, timestamp, distinct_id, amount], names=names), @@ -163,12 +163,22 @@ def create_data_warehouse_table_with_payments(self): team=self.team, columns={ "id": "String", - "timestamp": "DateTime64(3, 'UTC')", - "distinct_id": "String", + "dw_timestamp": "DateTime64(3, 'UTC')", + "dw_distinct_id": "String", "amount": "Int64", }, credential=credential, ) + + DataWarehouseJoin.objects.create( + team=self.team, + source_table_name=table_name, + source_table_key="dw_distinct_id", + joining_table_name="events", + joining_table_key="distinct_id", + field_name="events", + configuration={"experiments_optimized": True, "experiments_timestamp_key": "dw_timestamp"}, + ) return table_name @freeze_time("2020-01-01T12:00:00Z") @@ -494,10 +504,10 @@ def test_query_runner_with_data_warehouse_series(self): series=[ DataWarehouseNode( id=table_name, - distinct_id_field="distinct_id", - id_field="distinct_id", + distinct_id_field="dw_distinct_id", + id_field="id", table_name=table_name, - timestamp_field="timestamp", + timestamp_field="dw_timestamp", ) ] ) @@ -587,10 +597,10 @@ def test_query_runner_with_invalid_data_warehouse_table_name(self): series=[ DataWarehouseNode( id=table_name, - distinct_id_field="distinct_id", - id_field="distinct_id", + distinct_id_field="dw_distinct_id", + id_field="id", table_name=table_name, - timestamp_field="timestamp", + timestamp_field="dw_timestamp", ) ] ) @@ -610,10 +620,10 @@ def test_query_runner_with_invalid_data_warehouse_table_name(self): query=ExperimentTrendsQuery(**experiment.metrics[0]["query"]), team=self.team ) with freeze_time("2023-01-07"): - with self.assertRaises(QueryError) as context: + with self.assertRaises(KeyError) as context: query_runner.calculate() - self.assertEqual(str(context.exception), 'Unknown table "invalid_table_name".') + self.assertEqual(str(context.exception), "'invalid_table_name'") @freeze_time("2020-01-01T12:00:00Z") def test_query_runner_with_avg_math(self): diff --git a/posthog/hogql_queries/insights/funnels/base.py b/posthog/hogql_queries/insights/funnels/base.py index 75815c634bb08..1c7fb05d13f69 100644 --- a/posthog/hogql_queries/insights/funnels/base.py +++ b/posthog/hogql_queries/insights/funnels/base.py @@ -37,6 +37,8 @@ ) from posthog.types import EntityNode, ExclusionEntityNode +JOIN_ALGOS = "direct,parallel_hash,hash,full_sorting_merge" + class FunnelBase(ABC): context: FunnelQueryContext diff --git a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py index d3a372af506a7..40d15aae68a69 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_trends_udf.py @@ -6,6 +6,7 @@ from posthog.hogql.constants import HogQLQuerySettings from posthog.hogql.parser import parse_select, parse_expr from posthog.hogql_queries.insights.funnels import FunnelTrends +from posthog.hogql_queries.insights.funnels.base import JOIN_ALGOS from posthog.hogql_queries.insights.utils.utils import get_start_of_interval_hogql_str from 
posthog.schema import BreakdownType, BreakdownAttributionType from posthog.utils import DATERANGE_MAP, relative_date_parse @@ -195,7 +196,9 @@ def get_query(self) -> ast.SelectQuery: """, {"fill_query": fill_query, "inner_select": inner_select}, ) - return cast(ast.SelectQuery, s) + s = cast(ast.SelectQuery, s) + s.settings = HogQLQuerySettings(join_algorithm=JOIN_ALGOS) + return s def _matching_events(self): if ( @@ -254,4 +257,5 @@ def actor_query( select_from=select_from, order_by=order_by, where=where, + settings=HogQLQuerySettings(join_algorithm=JOIN_ALGOS), ) diff --git a/posthog/hogql_queries/insights/funnels/funnel_udf.py b/posthog/hogql_queries/insights/funnels/funnel_udf.py index ac4fda03069d3..1bc07d685da02 100644 --- a/posthog/hogql_queries/insights/funnels/funnel_udf.py +++ b/posthog/hogql_queries/insights/funnels/funnel_udf.py @@ -1,9 +1,9 @@ from typing import cast, Optional from posthog.hogql import ast -from posthog.hogql.constants import DEFAULT_RETURNED_ROWS +from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, HogQLQuerySettings from posthog.hogql.parser import parse_select, parse_expr -from posthog.hogql_queries.insights.funnels.base import FunnelBase +from posthog.hogql_queries.insights.funnels.base import FunnelBase, JOIN_ALGOS from posthog.schema import BreakdownType, BreakdownAttributionType from posthog.utils import DATERANGE_MAP @@ -169,8 +169,10 @@ def get_query(self) -> ast.SelectQuery: ) # Weird: unless you reference row_number in this outer block, it doesn't work correctly - s = parse_select( - f""" + s = cast( + ast.SelectQuery, + parse_select( + f""" SELECT {step_results2}, {mean_conversion_times}, @@ -182,10 +184,11 @@ def get_query(self) -> ast.SelectQuery: GROUP BY final_prop LIMIT {self.get_breakdown_limit() + 1 if use_breakdown_limit else DEFAULT_RETURNED_ROWS} """, - {"s": s}, + {"s": s}, + ), ) - - return cast(ast.SelectQuery, s) + s.settings = HogQLQuerySettings(join_algorithm=JOIN_ALGOS) + return s def _get_funnel_person_step_condition(self) -> ast.Expr: actorsQuery, breakdownType = ( @@ -294,4 +297,5 @@ def actor_query( select_from=select_from, order_by=order_by, where=where, + settings=HogQLQuerySettings(join_algorithm=JOIN_ALGOS), ) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr index 3f3fd82910546..bc580c8b79bd9 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr @@ -60,7 +60,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), 
assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -99,7 +99,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed']), equals(event.event, 'insight loaded'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source))) @@ -192,7 +192,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), 
assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -231,7 +231,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2021-01-08 23:59:59', 6, 'UTC')))), notIn(event.event, ['$pageview', 'insight analyzed', 'insight updated']), equals(event.event, 'insight loaded'), ifNull(notEquals(funnel_actors.steps, 3), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source))) @@ -325,7 +325,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -366,7 +366,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr index 00797c892c6b5..1ef2e20dafbba 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr @@ -56,7 +56,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) + ORDER BY 
aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(if(not(empty(event__override.distinct_id)), event__override.person_id, event.person_id), funnel_actors.actor_id) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) GROUP BY name LIMIT 100 @@ -108,7 +108,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -171,7 +171,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors JOIN (SELECT persons.id AS id, persons.properties AS person_props @@ -236,7 +236,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -309,7 +309,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -350,7 +350,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -444,7 +444,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS 
join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -485,7 +485,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -579,7 +579,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -620,7 +620,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -714,7 +714,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -755,7 +755,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -839,7 +839,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors JOIN (SELECT persons.id AS id, persons.properties AS person_props @@ -904,7 +904,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -977,7 +977,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS 
join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -1018,7 +1018,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -1112,7 +1112,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -1153,7 +1153,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -1247,7 +1247,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -1288,7 +1288,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -1382,7 +1382,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -1423,7 +1423,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source))) @@ -1502,7 +1502,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 
0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) GROUP BY name, prop @@ -1549,7 +1549,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -1607,7 +1607,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_1`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), in(event.event, ['positively_related', 'negatively_related']))) GROUP BY name, prop @@ -1654,7 +1654,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -1707,7 +1707,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - 
ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) GROUP BY name LIMIT 100 @@ -1752,7 +1752,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -1807,7 +1807,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -1874,7 +1874,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), 
assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -1941,7 +1941,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2008,7 +2008,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 
'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2081,7 +2081,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) GROUP BY name LIMIT 100 @@ -2134,7 +2134,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -2189,7 +2189,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2256,7 +2256,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target 
ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2321,7 +2321,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) GROUP BY name LIMIT 100 @@ -2366,7 +2366,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -2421,7 +2421,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), 
less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2488,7 +2488,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'positively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2555,7 +2555,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY 
actor_id ORDER BY actor_id ASC) AS source @@ -2622,7 +2622,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2695,7 +2695,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), notIn(event.event, [])) GROUP BY name LIMIT 100 @@ -2748,7 +2748,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -2803,7 +2803,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') 
AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(equals(funnel_actors.steps, 2), 0)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2870,7 +2870,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors ON equals(funnel_actors.actor_id, event.`$group_0`) WHERE and(equals(event.team_id, 99999), greaterOrEquals(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-01 00:00:00', 6, 'UTC'))), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC'))), equals(event.team_id, 99999), greater(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), funnel_actors.first_timestamp), less(toTimeZone(toDateTime(toTimeZone(event.timestamp, 'UTC'), 'UTC'), 'UTC'), coalesce(funnel_actors.final_timestamp, plus(toTimeZone(funnel_actors.first_timestamp, 'UTC'), toIntervalDay(14)), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-14 23:59:59', 6, 'UTC')))), notIn(event.event, ['paid', 'user signed up']), equals(event.event, 'negatively_related'), ifNull(notEquals(funnel_actors.steps, 2), 1)) GROUP BY actor_id ORDER BY actor_id ASC) AS source @@ -2938,7 +2938,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -2995,7 +2995,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -3057,7 +3057,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS 
join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3131,7 +3131,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3205,7 +3205,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3279,7 +3279,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3347,7 +3347,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -3404,7 +3404,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -3460,7 +3460,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -3517,7 +3517,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -3579,7 +3579,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE 
ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3653,7 +3653,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3727,7 +3727,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3801,7 +3801,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -3869,7 +3869,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -3926,7 +3926,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -3982,7 +3982,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -4039,7 +4039,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -4101,7 +4101,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) 
AS source @@ -4175,7 +4175,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4249,7 +4249,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4323,7 +4323,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4391,7 +4391,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -4448,7 +4448,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -4504,7 +4504,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -4561,7 +4561,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -4623,7 +4623,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4697,7 +4697,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 
0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4771,7 +4771,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4845,7 +4845,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -4913,7 +4913,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -4970,7 +4970,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -5026,7 +5026,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -5083,7 +5083,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -5145,7 +5145,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -5219,7 +5219,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + 
ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -5293,7 +5293,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(equals(funnel_actors.steps, 2), 0) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -5367,7 +5367,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors WHERE ifNull(notEquals(funnel_actors.steps, 2), 1) GROUP BY funnel_actors.actor_id ORDER BY funnel_actors.actor_id ASC) AS source @@ -5435,7 +5435,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LEFT JOIN (SELECT groups.key AS key, groups.properties AS properties @@ -5492,7 +5492,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS funnel_actors + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS funnel_actors LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr index bfe0f81e29959..370ed267d8b65 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr @@ -44,7 +44,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -122,7 +122,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -200,7 +200,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(equals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person diff --git 
a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr index 9451ffa4690f6..328abd6df2e65 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr @@ -44,7 +44,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -122,7 +122,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -200,7 +200,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(equals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr index a61e3cb06ab29..d869459cb3485 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -62,7 +62,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -142,7 +143,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -215,7 +217,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -296,7 +299,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -377,7 +381,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -465,7 +470,8 @@ GROUP BY breakdown ORDER BY 
step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -539,7 +545,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 0), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('finance'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('finance'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -622,7 +628,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 1), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('finance'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('finance'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -705,7 +711,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 0), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('technology'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('technology'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -788,7 +794,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 1), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('technology'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('technology'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr index 3070a213fe4da..cb4021c05d56d 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr @@ -42,7 +42,7 @@ GROUP BY aggregation_target SETTINGS date_time_output_format='iso', date_time_input_format='best_effort') WHERE and(ifNull(equals(success_bool, 1), 0), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -118,7 +118,7 @@ GROUP BY aggregation_target SETTINGS date_time_output_format='iso', date_time_input_format='best_effort') WHERE and(ifNull(notEquals(success_bool, 1), 1), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC 
SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -194,7 +194,7 @@ GROUP BY aggregation_target SETTINGS date_time_output_format='iso', date_time_input_format='best_effort') WHERE and(ifNull(equals(success_bool, 1), 0), ifNull(equals(entrance_period_start, toDateTime64('2021-05-01 00:00:00.000000', 6, 'UTC')), 0)) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr index 90889f847b87b..b0b220a50a0bb 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_udf.ambr @@ -41,7 +41,8 @@ GROUP BY entrance_period_start, data.breakdown ORDER BY entrance_period_start ASC - LIMIT 1000 SETTINGS readonly=2, + LIMIT 1000 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -93,7 +94,8 @@ GROUP BY entrance_period_start, data.breakdown ORDER BY entrance_period_start ASC - LIMIT 1000 SETTINGS readonly=2, + LIMIT 1000 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -145,7 +147,8 @@ GROUP BY entrance_period_start, data.breakdown ORDER BY entrance_period_start ASC - LIMIT 1000 SETTINGS readonly=2, + LIMIT 1000 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr index 76914332b11c4..ea0939ad56226 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -53,7 +53,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -104,7 +105,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, person.id AS id @@ -187,7 +188,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -268,7 +270,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - 
LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -343,7 +346,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -405,7 +409,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 0), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, person.id AS id @@ -477,7 +481,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 1), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, person.id AS id @@ -549,7 +553,7 @@ GROUP BY aggregation_target HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE ifNull(greaterOrEquals(step_reached, 2), 0) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT argMax(toTimeZone(person.created_at, 'UTC'), person.version) AS created_at, person.id AS id @@ -629,7 +633,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -687,7 +692,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 100 SETTINGS readonly=2, + LIMIT 100 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -760,7 +766,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -840,7 +847,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -913,7 +921,8 @@ GROUP BY breakdown ORDER BY step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -994,7 +1003,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS 
join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1075,7 +1085,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1163,7 +1174,8 @@ GROUP BY breakdown ORDER BY step_3 DESC, step_2 DESC, step_1 DESC) GROUP BY final_prop - LIMIT 26 SETTINGS readonly=2, + LIMIT 26 SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1237,7 +1249,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 0), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('finance'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('finance'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -1320,7 +1332,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 1), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('finance'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('finance'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -1403,7 +1415,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 0), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('technology'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('technology'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person @@ -1486,7 +1498,7 @@ HAVING ifNull(greaterOrEquals(step_reached, 0), 0)) WHERE and(ifNull(greaterOrEquals(step_reached, 1), 0), ifNull(equals(arrayFlatten(array(breakdown)), arrayFlatten(array('technology'))), isNull(arrayFlatten(array(breakdown))) and isNull(arrayFlatten(array('technology'))))) - ORDER BY aggregation_target ASC) AS source + ORDER BY aggregation_target ASC SETTINGS join_algorithm='direct,parallel_hash,hash,full_sorting_merge') AS source INNER JOIN (SELECT person.id AS id FROM person diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr index 8d72ab1b70121..4b096b060262e 100644 --- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr +++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr @@ -851,49 +851,14 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 ''' - SELECT groupArray(1)(date)[1] AS date, - arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, - 
if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value - FROM - (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, - arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) - and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, - breakdown_value AS breakdown_value, - rowNumberInAllBlocks() AS row_number - FROM - (SELECT sum(total) AS count, - day_start AS day_start, - breakdown_value AS breakdown_value - FROM - (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, - toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value - FROM events AS e SAMPLE 1.0 - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) - GROUP BY day_start, - breakdown_value) - GROUP BY day_start, - breakdown_value - ORDER BY day_start ASC, breakdown_value ASC) - GROUP BY breakdown_value - ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) - WHERE isNotNull(breakdown_value) - GROUP BY breakdown_value - ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC - LIMIT 50000 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.10 @@ -944,10 +909,10 @@ (SELECT 
argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) + WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -991,10 +956,10 @@ (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) + WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, breakdown_value) GROUP BY day_start, @@ -1038,10 +1003,10 @@ (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) + WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), 
equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, breakdown_value_1) GROUP BY day_start, @@ -1085,10 +1050,10 @@ (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 2) + WHERE equals(person_distinct_id_overrides.team_id, 99999) GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 2), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) + WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) GROUP BY day_start, breakdown_value_1) GROUP BY day_start, @@ -1110,143 +1075,38 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 ''' - SELECT groupArray(1)(date)[1] AS date, - arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, - if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', breakdown_value) AS breakdown_value - FROM - (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, - arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) - and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, - breakdown_value AS breakdown_value, - rowNumberInAllBlocks() AS row_number - FROM - (SELECT sum(total) AS count, - day_start AS day_start, - breakdown_value AS breakdown_value - FROM - (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, - toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value - FROM events AS e SAMPLE 1.0 - LEFT OUTER JOIN - (SELECT 
argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) - GROUP BY day_start, - breakdown_value) - GROUP BY day_start, - breakdown_value - ORDER BY day_start ASC, breakdown_value ASC) - GROUP BY breakdown_value - ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) - WHERE isNotNull(breakdown_value) - GROUP BY breakdown_value - ORDER BY if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_other_$$'), 0), 2, if(ifNull(equals(breakdown_value, '$$_posthog_breakdown_null_$$'), 0), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC - LIMIT 50000 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 ''' - SELECT groupArray(1)(date)[1] AS date, - arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, - arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value - FROM - (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, - arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) - and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, - breakdown_value AS breakdown_value, - rowNumberInAllBlocks() AS row_number - FROM - (SELECT sum(total) AS count, - day_start AS day_start, - [ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value - FROM - (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, - toStartOfDay(toTimeZone(e.timestamp, 'UTC')) 
AS day_start, - ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 - FROM events AS e SAMPLE 1.0 - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) - GROUP BY day_start, - breakdown_value_1) - GROUP BY day_start, - breakdown_value_1 - ORDER BY day_start ASC, breakdown_value ASC) - GROUP BY breakdown_value - ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) - WHERE arrayExists(x -> isNotNull(x), breakdown_value) - GROUP BY breakdown_value - ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC - LIMIT 50000 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 ''' - SELECT groupArray(1)(date)[1] AS date, - arrayFold((acc, x) -> arrayMap(i -> plus(acc[i], x[i]), range(1, plus(length(date), 1))), groupArray(ifNull(total, 0)), arrayWithConstant(length(date), reinterpretAsFloat64(0))) AS total, - arrayMap(i -> if(ifNull(ifNull(greaterOrEquals(row_number, 25), 0), 0), '$$_posthog_breakdown_other_$$', i), breakdown_value) AS breakdown_value - FROM - (SELECT arrayMap(number -> plus(toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toIntervalDay(number)), range(0, plus(coalesce(dateDiff('day', toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC'))), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))))), 1))) AS date, - arrayMap(_match_date -> arraySum(arraySlice(groupArray(ifNull(count, 0)), indexOf(groupArray(day_start) AS _days_for_count, _match_date) AS _index, plus(minus(arrayLastIndex(x -> ifNull(equals(x, _match_date), isNull(x) - and isNull(_match_date)), _days_for_count), _index), 1))), date) AS total, - breakdown_value AS breakdown_value, - rowNumberInAllBlocks() AS row_number - FROM - (SELECT sum(total) AS count, - day_start AS day_start, - 
[ifNull(toString(breakdown_value_1), '$$_posthog_breakdown_null_$$')] AS breakdown_value - FROM - (SELECT count(DISTINCT if(not(empty(e__override.distinct_id)), e__override.person_id, e.person_id)) AS total, - toStartOfDay(toTimeZone(e.timestamp, 'UTC')) AS day_start, - ifNull(nullIf(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(e.properties, '$some_property'), ''), 'null'), '^"|"$', '')), ''), '$$_posthog_breakdown_null_$$') AS breakdown_value_1 - FROM events AS e SAMPLE 1.0 - LEFT OUTER JOIN - (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, - person_distinct_id_overrides.distinct_id AS distinct_id - FROM person_distinct_id_overrides - WHERE equals(person_distinct_id_overrides.team_id, 99999) - GROUP BY person_distinct_id_overrides.distinct_id - HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS e__override ON equals(e.distinct_id, e__override.distinct_id) - WHERE and(equals(e.team_id, 99999), greaterOrEquals(toTimeZone(e.timestamp, 'UTC'), toStartOfDay(assumeNotNull(parseDateTime64BestEffortOrNull('2019-12-28 00:00:00', 6, 'UTC')))), lessOrEquals(toTimeZone(e.timestamp, 'UTC'), assumeNotNull(parseDateTime64BestEffortOrNull('2020-01-04 23:59:59', 6, 'UTC'))), equals(e.event, 'sign up')) - GROUP BY day_start, - breakdown_value_1) - GROUP BY day_start, - breakdown_value_1 - ORDER BY day_start ASC, breakdown_value ASC) - GROUP BY breakdown_value - ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC) - WHERE arrayExists(x -> isNotNull(x), breakdown_value) - GROUP BY breakdown_value - ORDER BY if(has(breakdown_value, '$$_posthog_breakdown_other_$$'), 2, if(has(breakdown_value, '$$_posthog_breakdown_null_$$'), 1, 0)) ASC, arraySum(total) DESC, breakdown_value ASC - LIMIT 50000 SETTINGS readonly=2, - max_execution_time=60, - allow_experimental_object_type=1, - format_csv_allow_double_quotes=0, - max_ast_elements=4000000, - max_expanded_ast_elements=4000000, - max_bytes_before_external_group_by=0 + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; ''' # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.5 diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index 3a3dabc69641a..668cd8b2afb48 100644 --- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -17,7 +17,6 @@ from posthog.clickhouse import query_tagging from posthog.hogql import ast from posthog.hogql.constants import MAX_SELECT_RETURNED_ROWS, LimitContext -from posthog.hogql.context import HogQLContext from posthog.hogql.printer import to_printed_hogql from posthog.hogql.query import execute_hogql_query from posthog.hogql.timings import HogQLTimings @@ -292,7 +291,7 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse: compare=res_compare, ) - def calculate(self, context: Optional[HogQLContext] = None): + def calculate(self): queries = self.to_queries() if len(queries) == 0: @@ -304,8 +303,7 @@ def calculate(self, context: 
Optional[HogQLContext] = None): response_hogql_query = ast.SelectSetQuery.create_from_queries(queries, "UNION ALL") with self.timings.measure("printing_hogql_for_response"): - database = context.database if context else None - response_hogql = to_printed_hogql(response_hogql_query, self.team, self.modifiers, database) + response_hogql = to_printed_hogql(response_hogql_query, self.team, self.modifiers) res_matrix: list[list[Any] | Any | None] = [None] * len(queries) timings_matrix: list[list[QueryTiming] | None] = [None] * (2 + len(queries)) @@ -332,7 +330,6 @@ def run( timings=timings, modifiers=self.modifiers, limit_context=self.limit_context, - context=context, ) timings_matrix[index + 1] = response.timings diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py index 4c13f82346011..bcc091e516203 100644 --- a/posthog/hogql_queries/query_runner.py +++ b/posthog/hogql_queries/query_runner.py @@ -58,7 +58,6 @@ WebGoalsQuery, WebOverviewQuery, WebStatsTableQuery, - WebTopClicksQuery, ) from posthog.schema_helpers import to_dict, to_json from posthog.utils import generate_cache_key, get_from_dict_or_attr @@ -144,7 +143,6 @@ def shared_insights_execution_mode(execution_mode: ExecutionMode) -> ExecutionMo SessionsTimelineQuery, WebOverviewQuery, WebStatsTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ] @@ -306,16 +304,7 @@ def get_query_runner( modifiers=modifiers, limit_context=limit_context, ) - if kind == "WebTopClicksQuery": - from .web_analytics.top_clicks import WebTopClicksQueryRunner - return WebTopClicksQueryRunner( - query=query, - team=team, - timings=timings, - modifiers=modifiers, - limit_context=limit_context, - ) if kind == "WebStatsTableQuery": from .web_analytics.stats_table import WebStatsTableQueryRunner diff --git a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr index dd53fefcdb46c..a23c828bb8864 100644 --- a/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr +++ b/posthog/hogql_queries/test/__snapshots__/test_error_tracking_query_runner.ambr @@ -134,6 +134,50 @@ max_bytes_before_external_group_by=0 ''' # --- +# name: TestErrorTrackingQueryRunner.test_ordering + ''' + SELECT count(DISTINCT events.uuid) AS occurrences, + count(DISTINCT events.`$session_id`) AS sessions, + count(DISTINCT events.distinct_id) AS users, + max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, + min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), 1) + GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + ORDER BY last_seen DESC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- +# name: TestErrorTrackingQueryRunner.test_ordering.1 + ''' + SELECT count(DISTINCT events.uuid) AS occurrences, + count(DISTINCT events.`$session_id`) AS sessions, + count(DISTINCT events.distinct_id) 
AS users, + max(toTimeZone(events.timestamp, 'UTC')) AS last_seen, + min(toTimeZone(events.timestamp, 'UTC')) AS first_seen, + replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') AS id + FROM events + WHERE and(equals(events.team_id, 99999), equals(events.event, '$exception'), isNotNull(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '')), 1) + GROUP BY replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$exception_issue_id'), ''), 'null'), '^"|"$', '') + ORDER BY first_seen ASC + LIMIT 101 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0 + ''' +# --- # name: TestErrorTrackingQueryRunner.test_search_query ''' SELECT count(DISTINCT events.uuid) AS occurrences, diff --git a/posthog/hogql_queries/test/test_error_tracking_query_runner.py b/posthog/hogql_queries/test/test_error_tracking_query_runner.py index 0071779a0e18c..2b8bb4e78f83a 100644 --- a/posthog/hogql_queries/test/test_error_tracking_query_runner.py +++ b/posthog/hogql_queries/test/test_error_tracking_query_runner.py @@ -1,6 +1,9 @@ from unittest import TestCase from freezegun import freeze_time +from dateutil.relativedelta import relativedelta +from django.utils.timezone import now + from posthog.hogql_queries.error_tracking_query_runner import ErrorTrackingQueryRunner, search_tokenizer from posthog.schema import ( ErrorTrackingQuery, @@ -187,7 +190,7 @@ class TestErrorTrackingQueryRunner(ClickhouseTestMixin, APIBaseTest): issue_two = "01936e80-5e69-7e70-b837-871f5cdad28b" issue_three = "01936e80-aa51-746f-aec4-cdf16a5c5332" - def create_events_and_issue(self, issue_id, distinct_ids, exception_list=None): + def create_events_and_issue(self, issue_id, distinct_ids, timestamp=None, exception_list=None): event_properties = {"$exception_issue_id": issue_id} if exception_list: event_properties["$exception_list"] = exception_list @@ -198,6 +201,7 @@ def create_events_and_issue(self, issue_id, distinct_ids, exception_list=None): event="$exception", team=self.team, properties=event_properties, + timestamp=timestamp, ) ErrorTrackingIssue.objects.create(id=issue_id, team=self.team) @@ -224,9 +228,14 @@ def setUp(self): self.create_events_and_issue( issue_id=self.issue_one, distinct_ids=[self.distinct_id_one, self.distinct_id_two], + timestamp=now() - relativedelta(hours=3), + ) + self.create_events_and_issue( + issue_id=self.issue_two, distinct_ids=[self.distinct_id_one], timestamp=now() - relativedelta(hours=2) + ) + self.create_events_and_issue( + issue_id=self.issue_three, distinct_ids=[self.distinct_id_two], timestamp=now() - relativedelta(hours=1) ) - self.create_events_and_issue(issue_id=self.issue_two, distinct_ids=[self.distinct_id_one]) - self.create_events_and_issue(issue_id=self.issue_three, distinct_ids=[self.distinct_id_two]) flush_persons_and_events() @@ -453,6 +462,24 @@ def test_hogql_filters(self): # two errors exist for person with distinct_id_two self.assertEqual(len(results), 2) + @snapshot_clickhouse_queries + def test_ordering(self): + runner = ErrorTrackingQueryRunner( + team=self.team, + query=ErrorTrackingQuery(kind="ErrorTrackingQuery", dateRange=DateRange(), orderBy="last_seen"), + ) + + results = self._calculate(runner)["results"] + self.assertEqual([r["id"] for r in results], 
[self.issue_three, self.issue_two, self.issue_one]) + + runner = ErrorTrackingQueryRunner( + team=self.team, + query=ErrorTrackingQuery(kind="ErrorTrackingQuery", dateRange=DateRange(), orderBy="first_seen"), + ) + + results = self._calculate(runner)["results"] + self.assertEqual([r["id"] for r in results], [self.issue_one, self.issue_two, self.issue_three]) + # def test_merges_and_defaults_groups(self): # ErrorTrackingGroup.objects.create( # team=self.team, diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py index 8336a173c04bf..1633c4389879d 100644 --- a/posthog/hogql_queries/web_analytics/stats_table.py +++ b/posthog/hogql_queries/web_analytics/stats_table.py @@ -45,30 +45,42 @@ def to_query(self) -> ast.SelectQuery: return self.to_path_scroll_bounce_query() elif self.query.includeBounceRate: return self.to_path_bounce_query() + if self.query.breakdownBy == WebStatsBreakdown.INITIAL_PAGE: if self.query.includeBounceRate: return self.to_entry_bounce_query() if self._has_session_properties(): - self._to_main_query_with_session_properties() + return self._to_main_query_with_session_properties() + return self.to_main_query() def to_main_query(self) -> ast.SelectQuery: with self.timings.measure("stats_table_query"): query = parse_select( """ +WITH + start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, + start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment SELECT {processed_breakdown_value} AS "context.columns.breakdown_value", - uniq(filtered_person_id) AS "context.columns.visitors", - sum(filtered_pageview_count) AS "context.columns.views" + tuple( + uniqIf(filtered_person_id, current_period_segment), + uniqIf(filtered_person_id, previous_period_segment) + ) AS "context.columns.visitors", + tuple( + sumIf(filtered_pageview_count, current_period_segment), + sumIf(filtered_pageview_count, previous_period_segment) + ) AS "context.columns.views" FROM ( SELECT any(person_id) AS filtered_person_id, count() AS filtered_pageview_count, - {breakdown_value} AS breakdown_value + {breakdown_value} AS breakdown_value, + min(session.$start_timestamp) as start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {all_properties}, @@ -87,10 +99,12 @@ def to_main_query(self) -> ast.SelectQuery: "processed_breakdown_value": self._processed_breakdown_value(), "where_breakdown": self.where_breakdown(), "all_properties": self._all_properties(), + "date_from_previous_period": self._date_from_previous_period(), "date_from": self._date_from(), "date_to": self._date_to(), }, ) + assert isinstance(query, ast.SelectQuery) if self._include_extra_aggregation_value(): @@ -102,19 +116,29 @@ def _to_main_query_with_session_properties(self) -> ast.SelectQuery: with self.timings.measure("stats_table_query"): query = parse_select( """ +WITH + start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, + start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment SELECT {processed_breakdown_value} AS "context.columns.breakdown_value", - uniq(filtered_person_id) AS "context.columns.visitors", - sum(filtered_pageview_count) AS "context.columns.views" + tuple( + uniqIf(filtered_person_id, current_period_segment), + uniqIf(filtered_person_id, previous_period_segment) + ) AS 
"context.columns.visitors", + tuple( + sumIf(filtered_pageview_count, current_period_segment), + sumIf(filtered_pageview_count, previous_period_segment) + ) AS "context.columns.views" FROM ( SELECT any(person_id) AS filtered_person_id, count() AS filtered_pageview_count, {breakdown_value} AS breakdown_value, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) as start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -135,6 +159,7 @@ def _to_main_query_with_session_properties(self) -> ast.SelectQuery: "where_breakdown": self.where_breakdown(), "event_properties": self._event_properties(), "session_properties": self._session_properties(), + "date_from_previous_period": self._date_from_previous_period(), "date_from": self._date_from(), "date_to": self._date_to(), }, @@ -150,21 +175,34 @@ def to_entry_bounce_query(self) -> ast.SelectQuery: with self.timings.measure("stats_table_query"): query = parse_select( """ +WITH + start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, + start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment SELECT breakdown_value AS "context.columns.breakdown_value", - uniq(filtered_person_id) AS "context.columns.visitors", - sum(filtered_pageview_count) AS "context.columns.views", - avg(is_bounce) AS "context.columns.bounce_rate" + tuple( + uniqIf(filtered_person_id, current_period_segment), + uniqIf(filtered_person_id, previous_period_segment) + ) AS "context.columns.visitors", + tuple( + sumIf(filtered_pageview_count, current_period_segment), + sumIf(filtered_pageview_count, previous_period_segment) + ) AS "context.columns.views", + tuple( + avgIf(is_bounce, current_period_segment), + avgIf(is_bounce, previous_period_segment) + ) AS "context.columns.bounce_rate", FROM ( SELECT + {bounce_breakdown} AS breakdown_value, any(person_id) AS filtered_person_id, count() AS filtered_pageview_count, - {bounce_breakdown} AS breakdown_value, any(session.$is_bounce) AS is_bounce, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) as start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -184,6 +222,7 @@ def to_entry_bounce_query(self) -> ast.SelectQuery: "where_breakdown": self.where_breakdown(), "session_properties": self._session_properties(), "event_properties": self._event_properties(), + "date_from_previous_period": self._date_from_previous_period(), "date_from": self._date_from(), "date_to": self._date_to(), }, @@ -198,27 +237,33 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery: with self.timings.measure("stats_table_bounce_query"): query = parse_select( """ +WITH + start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, + start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment SELECT counts.breakdown_value AS "context.columns.breakdown_value", - counts.visitors AS "context.columns.visitors", - counts.views AS "context.columns.views", - bounce.bounce_rate AS "context.columns.bounce_rate", - scroll.average_scroll_percentage AS "context.columns.average_scroll_percentage", - scroll.scroll_gt80_percentage AS 
"context.columns.scroll_gt80_percentage" + tuple(counts.visitors, counts.previous_visitors) AS "context.columns.visitors", + tuple(counts.views, counts.previous_views) AS "context.columns.views", + tuple(bounce.bounce_rate, bounce.previous_bounce_rate) AS "context.columns.bounce_rate", + tuple(scroll.average_scroll_percentage, scroll.previous_average_scroll_percentage) AS "context.columns.average_scroll_percentage", + tuple(scroll.scroll_gt80_percentage, scroll.previous_scroll_gt80_percentage) AS "context.columns.scroll_gt80_percentage", FROM ( SELECT breakdown_value, - uniq(filtered_person_id) AS visitors, - sum(filtered_pageview_count) AS views + uniqIf(filtered_person_id, current_period_segment) AS visitors, + uniqIf(filtered_person_id, previous_period_segment) AS previous_visitors, + sumIf(filtered_pageview_count, current_period_segment) AS views, + sumIf(filtered_pageview_count, previous_period_segment) AS previous_views FROM ( SELECT any(person_id) AS filtered_person_id, count() AS filtered_pageview_count, {breakdown_value} AS breakdown_value, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp ) AS start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -232,15 +277,17 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery: LEFT JOIN ( SELECT breakdown_value, - avg(is_bounce) AS bounce_rate + avgIf(is_bounce, current_period_segment) AS bounce_rate, + avgIf(is_bounce, previous_period_segment) AS previous_bounce_rate FROM ( SELECT {bounce_breakdown_value} AS breakdown_value, -- use $entry_pathname to find the bounce rate for sessions that started on this pathname any(session.`$is_bounce`) AS is_bounce, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) as start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -255,8 +302,10 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery: LEFT JOIN ( SELECT breakdown_value, - avgMerge(average_scroll_percentage_state) AS average_scroll_percentage, - avgMerge(scroll_gt80_percentage_state) AS scroll_gt80_percentage + avgMergeIf(average_scroll_percentage_state, current_period_segment) AS average_scroll_percentage, + avgMergeIf(average_scroll_percentage_state, previous_period_segment) AS previous_average_scroll_percentage, + avgMergeIf(scroll_gt80_percentage_state, current_period_segment) AS scroll_gt80_percentage, + avgMergeIf(scroll_gt80_percentage_state, previous_period_segment) AS previous_scroll_gt80_percentage FROM ( SELECT {scroll_breakdown_value} AS breakdown_value, -- use $prev_pageview_pathname to find the scroll depth when leaving this pathname @@ -267,10 +316,11 @@ def to_path_scroll_bounce_query(self) -> ast.SelectQuery: END ) AS scroll_gt80_percentage_state, avgState(toFloat(events.properties.`$prev_pageview_max_scroll_percentage`)) as average_scroll_percentage_state, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) AS start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, or(events.event == '$pageview', events.event == '$pageleave'), {event_properties_for_scroll}, @@ -291,6 +341,7 @@ def to_path_scroll_bounce_query(self) -> 
ast.SelectQuery: "session_properties": self._session_properties(), "event_properties": self._event_properties(), "event_properties_for_scroll": self._event_properties_for_scroll(), + "date_from_previous_period": self._date_from_previous_period(), "date_from": self._date_from(), "date_to": self._date_to(), "breakdown_value": self._counts_breakdown_value(), @@ -308,25 +359,31 @@ def to_path_bounce_query(self) -> ast.SelectQuery: with self.timings.measure("stats_table_scroll_query"): query = parse_select( """ +WITH + start_timestamp >= {date_from} AND start_timestamp < {date_to} AS current_period_segment, + start_timestamp >= {date_from_previous_period} AND start_timestamp < {date_from} AS previous_period_segment SELECT counts.breakdown_value AS "context.columns.breakdown_value", - counts.visitors AS "context.columns.visitors", - counts.views AS "context.columns.views", - bounce.bounce_rate AS "context.columns.bounce_rate" + tuple(counts.visitors, counts.previous_visitors) AS "context.columns.visitors", + tuple(counts.views, counts.previous_views) AS "context.columns.views", + tuple(bounce.bounce_rate, bounce.previous_bounce_rate) AS "context.columns.bounce_rate" FROM ( SELECT breakdown_value, - uniq(filtered_person_id) AS visitors, - sum(filtered_pageview_count) AS views + uniqIf(filtered_person_id, current_period_segment) AS visitors, + uniqIf(filtered_person_id, previous_period_segment) AS previous_visitors, + sumIf(filtered_pageview_count, current_period_segment) AS views, + sumIf(filtered_pageview_count, previous_period_segment) AS previous_views FROM ( SELECT any(person_id) AS filtered_person_id, count() AS filtered_pageview_count, {breakdown_value} AS breakdown_value, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) AS start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -340,15 +397,17 @@ def to_path_bounce_query(self) -> ast.SelectQuery: LEFT JOIN ( SELECT breakdown_value, - avg(is_bounce) AS bounce_rate + avgIf(is_bounce, current_period_segment) AS bounce_rate, + avgIf(is_bounce, previous_period_segment) AS previous_bounce_rate FROM ( SELECT {bounce_breakdown_value} AS breakdown_value, -- use $entry_pathname to find the bounce rate for sessions that started on this pathname any(session.`$is_bounce`) AS is_bounce, - session.session_id AS session_id + session.session_id AS session_id, + min(session.$start_timestamp) AS start_timestamp FROM events WHERE and( - timestamp >= {date_from}, + timestamp >= {date_from_previous_period}, timestamp < {date_to}, events.event == '$pageview', {event_properties}, @@ -370,6 +429,7 @@ def to_path_bounce_query(self) -> ast.SelectQuery: "where_breakdown": self.where_breakdown(), "session_properties": self._session_properties(), "event_properties": self._event_properties(), + "date_from_previous_period": self._date_from_previous_period(), "date_from": self._date_from(), "date_to": self._date_to(), "bounce_breakdown_value": self._bounce_entry_pathname_breakdown(), @@ -433,6 +493,9 @@ def _date_to(self) -> ast.Expr: def _date_from(self) -> ast.Expr: return self.query_date_range.date_from_as_hogql() + def _date_from_previous_period(self) -> ast.Expr: + return self.query_date_range.previous_period_date_from_as_hogql() + def calculate(self): query = self.to_query() response = self.paginator.execute_hogql_query( @@ -450,8 +513,11 @@ def calculate(self): results, { 0: 
self._join_with_aggregation_value, # breakdown_value - 1: self._unsample, # views - 2: self._unsample, # visitors + 1: lambda tuple, row: (self._unsample(tuple[0], row), self._unsample(tuple[1], row)), # Views (tuple) + 2: lambda tuple, row: ( + self._unsample(tuple[0], row), + self._unsample(tuple[1], row), + ), # Visitors (tuple) }, ) diff --git a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py index c021b6d1268bc..ae4b48b0632c1 100644 --- a/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py +++ b/posthog/hogql_queries/web_analytics/test/test_web_stats_table.py @@ -147,8 +147,8 @@ def test_increase_in_users(self): self.assertEqual( [ - ["/", 2, 2], - ["/login", 1, 1], + ["/", (2, 0), (2, 0)], + ["/login", (1, 0), (1, 0)], ], results, ) @@ -168,9 +168,9 @@ def test_all_time(self): self.assertEqual( [ - ["/", 2, 2], - ["/docs", 1, 1], - ["/login", 1, 1], + ["/", (2, 0), (2, 0)], + ["/docs", (1, 0), (1, 0)], + ["/login", (1, 0), (1, 0)], ], results, ) @@ -195,7 +195,7 @@ def test_dont_filter_test_accounts(self): results = self._run_web_stats_table_query("2023-12-01", "2023-12-03", filter_test_accounts=False).results self.assertEqual( - [["/", 1, 1], ["/login", 1, 1]], + [["/", (1, 0), (1, 0)], ["/login", (1, 0), (1, 0)]], results, ) @@ -235,7 +235,7 @@ def test_limit(self): response_1 = self._run_web_stats_table_query("all", "2023-12-15", limit=1) self.assertEqual( [ - ["/", 2, 2], + ["/", (2, 0), (2, 0)], ], response_1.results, ) @@ -244,8 +244,8 @@ def test_limit(self): response_2 = self._run_web_stats_table_query("all", "2023-12-15", limit=2) self.assertEqual( [ - ["/", 2, 2], - ["/login", 1, 1], + ["/", (2, 0), (2, 0)], + ["/login", (1, 0), (1, 0)], ], response_2.results, ) @@ -280,10 +280,10 @@ def test_path_filters(self): self.assertEqual( [ - ["/cleaned/:id", 2, 2], - ["/cleaned/:id/path/:id", 1, 1], - ["/not-cleaned", 1, 1], - ["/thing_c", 1, 1], + ["/cleaned/:id", (2, 0), (2, 0)], + ["/cleaned/:id/path/:id", (1, 0), (1, 0)], + ["/not-cleaned", (1, 0), (1, 0)], + ["/thing_c", (1, 0), (1, 0)], ], results, ) @@ -308,9 +308,9 @@ def test_scroll_depth_bounce_rate_one_user(self): self.assertEqual( [ - ["/a", 1, 1, 0, 0.1, 0], - ["/b", 1, 1, None, 0.2, 0], - ["/c", 1, 1, None, 0.9, 1], + ["/a", (1, 0), (1, 0), (0, None), (0.1, None), (0, None)], + ["/b", (1, 0), (1, 0), (None, None), (0.2, None), (0, None)], + ["/c", (1, 0), (1, 0), (None, None), (0.9, None), (1, None)], ], results, ) @@ -350,9 +350,9 @@ def test_scroll_depth_bounce_rate(self): self.assertEqual( [ - ["/a", 3, 4, 1 / 3, 0.5, 0.5], - ["/b", 2, 2, None, 0.2, 0], - ["/c", 2, 2, None, 0.9, 1], + ["/a", (3, 0), (4, 0), (1 / 3, None), (0.5, None), (0.5, None)], + ["/b", (2, 0), (2, 0), (None, None), (0.2, None), (0, None)], + ["/c", (2, 0), (2, 0), (None, None), (0.9, None), (1, None)], ], results, ) @@ -393,7 +393,7 @@ def test_scroll_depth_bounce_rate_with_filter(self): self.assertEqual( [ - ["/a", 3, 4, 1 / 3, 0.5, 0.5], + ["/a", (3, 0), (4, 0), (1 / 3, None), (0.5, None), (0.5, None)], ], results, ) @@ -423,9 +423,9 @@ def test_scroll_depth_bounce_rate_path_cleaning(self): self.assertEqual( [ - ["/a/:id", 1, 1, 0, 0.1, 0], - ["/b/:id", 1, 1, None, 0.2, 0], - ["/c/:id", 1, 1, None, 0.9, 1], + ["/a/:id", (1, 0), (1, 0), (0, None), (0.1, None), (0, None)], + ["/b/:id", (1, 0), (1, 0), (None, None), (0.2, None), (0, None)], + ["/c/:id", (1, 0), (1, 0), (None, None), (0.9, None), (1, None)], ], results, ) @@ -449,9 +449,9 @@ def 
test_bounce_rate_one_user(self): self.assertEqual( [ - ["/a", 1, 1, 0], - ["/b", 1, 1, None], - ["/c", 1, 1, None], + ["/a", (1, 0), (1, 0), (0, None)], + ["/b", (1, 0), (1, 0), (None, None)], + ["/c", (1, 0), (1, 0), (None, None)], ], results, ) @@ -490,9 +490,9 @@ def test_bounce_rate(self): self.assertEqual( [ - ["/a", 3, 4, 1 / 3], - ["/b", 2, 2, None], - ["/c", 2, 2, None], + ["/a", (3, 0), (4, 0), (1 / 3, None)], + ["/b", (2, 0), (2, 0), (None, None)], + ["/c", (2, 0), (2, 0), (None, None)], ], results, ) @@ -532,7 +532,7 @@ def test_bounce_rate_with_property(self): self.assertEqual( [ - ["/a", 3, 4, 1 / 3], + ["/a", (3, 0), (4, 0), (1 / 3, None)], ], results, ) @@ -561,9 +561,9 @@ def test_bounce_rate_path_cleaning(self): self.assertEqual( [ - ["/a/:id", 1, 1, 0], - ["/b/:id", 1, 1, None], - ["/c/:id", 1, 1, None], + ["/a/:id", (1, 0), (1, 0), (0, None)], + ["/b/:id", (1, 0), (1, 0), (None, None)], + ["/c/:id", (1, 0), (1, 0), (None, None)], ], results, ) @@ -587,7 +587,7 @@ def test_entry_bounce_rate_one_user(self): self.assertEqual( [ - ["/a", 1, 3, 0], + ["/a", (1, 0), (3, 0), (0, None)], ], results, ) @@ -626,7 +626,7 @@ def test_entry_bounce_rate(self): self.assertEqual( [ - ["/a", 3, 8, 1 / 3], + ["/a", (3, 0), (8, 0), (1 / 3, None)], ], results, ) @@ -666,7 +666,7 @@ def test_entry_bounce_rate_with_property(self): self.assertEqual( [ - ["/a", 3, 4, 1 / 3], + ["/a", (3, 0), (4, 0), (1 / 3, None)], ], results, ) @@ -695,7 +695,7 @@ def test_entry_bounce_rate_path_cleaning(self): self.assertEqual( [ - ["/a/:id", 1, 3, 0], + ["/a/:id", (1, 0), (3, 0), (0, None)], ], results, ) @@ -743,7 +743,10 @@ def test_source_medium_campaign(self): ).results self.assertEqual( - [["google / (none) / (none)", 1, 1], ["news.ycombinator.com / referral / (none)", 1, 1]], + [ + ["google / (none) / (none)", (1, 0), (1, 0)], + ["news.ycombinator.com / referral / (none)", (1, 0), (1, 0)], + ], results, ) @@ -792,7 +795,7 @@ def test_null_in_utm_tags(self): ).results self.assertEqual( - [["google", 1.0, 1.0], [None, 1.0, 1.0]], + [["google", (1, 0), (1, 0)], [None, (1, 0), (1, 0)]], results, ) @@ -842,7 +845,7 @@ def test_is_not_set_filter(self): ).results self.assertEqual( - [[None, 1.0, 1.0]], + [[None, (1, 0), (1, 0)]], results, ) @@ -878,7 +881,7 @@ def test_same_user_multiple_sessions(self): "2024-07-31", breakdown_by=WebStatsBreakdown.INITIAL_UTM_SOURCE, ).results - assert [["google", 1, 2]] == results_session + assert [["google", (1, 0), (2, 0)]] == results_session # Try this with a query that uses event properties results_event = self._run_web_stats_table_query( @@ -886,13 +889,13 @@ def test_same_user_multiple_sessions(self): "2024-07-31", breakdown_by=WebStatsBreakdown.PAGE, ).results - assert [["/path", 1, 2]] == results_event + assert [["/path", (1, 0), (2, 0)]] == results_event # Try this with a query using the bounce rate results_event = self._run_web_stats_table_query( "all", "2024-07-31", breakdown_by=WebStatsBreakdown.PAGE, include_bounce_rate=True ).results - assert [["/path", 1, 2, None]] == results_event + assert [["/path", (1, 0), (2, 0), (None, None)]] == results_event # Try this with a query using the scroll depth results_event = self._run_web_stats_table_query( @@ -902,7 +905,7 @@ def test_same_user_multiple_sessions(self): include_bounce_rate=True, include_scroll_depth=True, ).results - assert [["/path", 1, 2, None, None, None]] == results_event + assert [["/path", (1, 0), (2, 0), (None, None), (None, None), (None, None)]] == results_event def test_no_session_id(self): d1 = 
"d1" @@ -935,13 +938,16 @@ def test_no_session_id(self): ).results assert [] == results - # Do show event property breakdowns of events of events with no session id + # Do show event property breakdowns of events with no session id + # but it will return 0 views because we depend on session.$start_timestamp + # to figure out the previous/current values results = self._run_web_stats_table_query( "all", "2024-07-31", breakdown_by=WebStatsBreakdown.PAGE, ).results - assert [["/path", 1, 1]] == results + + assert [["/path", (0, 0), (0, 0)]] == results def test_cohort_test_filters(self): d1 = "d1" @@ -1003,7 +1009,7 @@ def test_cohort_test_filters(self): breakdown_by=WebStatsBreakdown.PAGE, ).results - assert results == [["/path1", 1, 1]] + assert results == [["/path1", (1, 0), (1, 0)]] def test_language_filter(self): d1, s1 = "d1", str(uuid7("2024-07-30")) @@ -1079,45 +1085,61 @@ def test_language_filter(self): # which is causing this to be flaky (en-GB happens sometimes), # we'll instead assert on a reduced form where we're # not counting the country, but only the locale - # assert results == [["en-US", 1.0, 3.0], ["pt-BR", 1.0, 2.0], ["nl-", 1.0, 1.0]] + # assert results == [["en-US", (1, 0), (3, 0)], ["pt-BR", (1, 0), (2, 0)], ["nl-", (1, 0), (1, 0)]] country_results = [result[0].split("-")[0] for result in results] assert country_results == ["en", "pt", "nl"] - def test_timezone_filter(self): - date = "2024-07-30" + def test_timezone_filter_general(self): + before_date = "2024-07-14" + after_date = "2024-07-16" - for idx, (distinct_id, session_id) in enumerate( + for idx, (distinct_id, before_session_id, after_session_id) in enumerate( [ - ("UTC", str(uuid7(date))), - ("Asia/Calcutta", str(uuid7(date))), - ("America/New_York", str(uuid7(date))), - ("America/Sao_Paulo", str(uuid7(date))), + ("UTC", str(uuid7(before_date)), str(uuid7(after_date))), + ("Asia/Calcutta", str(uuid7(before_date)), str(uuid7(after_date))), + ("America/New_York", str(uuid7(before_date)), str(uuid7(after_date))), + ("America/Sao_Paulo", str(uuid7(before_date)), str(uuid7(after_date))), ] ): _create_person( team_id=self.team.pk, distinct_ids=[distinct_id], - properties={"name": session_id, "email": f"{distinct_id}@example.com"}, + properties={"name": before_session_id, "email": f"{distinct_id}@example.com"}, ) + # Always one event in the before_date + _create_event( + team=self.team, + event="$pageview", + distinct_id=distinct_id, + timestamp=before_date, + properties={"$session_id": before_session_id, "$pathname": f"/path/landing", "$timezone": distinct_id}, + ) + + # Several events in the actual range for i in range(idx + 1): _create_event( team=self.team, event="$pageview", distinct_id=distinct_id, - timestamp=date, - properties={"$session_id": session_id, "$pathname": f"/path{i}", "$timezone": distinct_id}, + timestamp=after_date, + properties={"$session_id": after_session_id, "$pathname": f"/path{i}", "$timezone": distinct_id}, ) results = self._run_web_stats_table_query( - "all", + "2024-07-15", # Period is since July first, we create some events before that date, and some after None, breakdown_by=WebStatsBreakdown.TIMEZONE, ).results # Brasilia UTC-3, New York UTC-4, Calcutta UTC+5:30, UTC - assert results == [[-3.0, 1.0, 4.0], [-4.0, 1.0, 3.0], [5.5, 1.0, 2.0], [0.0, 1.0, 1.0]] + assert results == [ + [-3, (1, 1), (4, 1)], + [-4, (1, 1), (3, 1)], + [5.5, (1, 1), (2, 1)], + [0, (1, 1), (1, 1)], + ] def test_timezone_filter_dst_change(self): did = "id" @@ -1146,7 +1168,7 @@ def 
test_timezone_filter_dst_change(self): ).results # Change from UTC-2 to UTC-3 in the middle of the night - assert results == [[-3.0, 1.0, 4.0], [-2.0, 1.0, 2.0]] + assert results == [[-3, (1, 0), (4, 0)], [-2, (1, 0), (2, 0)]] def test_timezone_filter_with_invalid_timezone(self): date = "2024-07-30" diff --git a/posthog/hogql_queries/web_analytics/top_clicks.py b/posthog/hogql_queries/web_analytics/top_clicks.py deleted file mode 100644 index 38c4e35dacd42..0000000000000 --- a/posthog/hogql_queries/web_analytics/top_clicks.py +++ /dev/null @@ -1,73 +0,0 @@ -from django.utils.timezone import datetime - -from posthog.hogql import ast -from posthog.hogql.parser import parse_select -from posthog.hogql.query import execute_hogql_query -from posthog.hogql_queries.utils.query_date_range import QueryDateRange -from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( - WebAnalyticsQueryRunner, -) -from posthog.models.filters.mixins.utils import cached_property -from posthog.schema import CachedWebTopClicksQueryResponse, WebTopClicksQuery, WebTopClicksQueryResponse - - -class WebTopClicksQueryRunner(WebAnalyticsQueryRunner): - query: WebTopClicksQuery - response: WebTopClicksQueryResponse - cached_response: CachedWebTopClicksQueryResponse - - def to_query(self) -> ast.SelectQuery | ast.SelectSetQuery: - with self.timings.measure("top_clicks_query"): - top_sources_query = parse_select( - """ -SELECT - properties.$el_text as el_text, - count() as total_clicks, - COUNT(DISTINCT events.person_id) as unique_visitors -FROM - events -WHERE - event == '$autocapture' -AND events.properties.$event_type = 'click' -AND el_text IS NOT NULL -AND ({events_where}) -GROUP BY - el_text -ORDER BY total_clicks DESC -LIMIT 10 - """, - timings=self.timings, - placeholders={ - "event_properties": self.events_where(), - "date_from": self.query_date_range.date_from_as_hogql(), - "date_to": self.query_date_range.date_to_as_hogql(), - }, - ) - return top_sources_query - - def calculate(self): - response = execute_hogql_query( - query_type="top_sources_query", - query=self.to_query(), - team=self.team, - timings=self.timings, - modifiers=self.modifiers, - limit_context=self.limit_context, - ) - - return WebTopClicksQueryResponse( - columns=response.columns, - results=response.results, - timings=response.timings, - types=response.types, - modifiers=self.modifiers, - ) - - @cached_property - def query_date_range(self): - return QueryDateRange( - date_range=self.query.dateRange, - team=self.team, - interval=None, - now=datetime.now(), - ) diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index 4854959d5c7a0..b73772ef79a90 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -18,7 +18,6 @@ from posthog.models.filters.mixins.utils import cached_property from posthog.schema import ( EventPropertyFilter, - WebTopClicksQuery, WebOverviewQuery, WebStatsTableQuery, PersonPropertyFilter, @@ -29,9 +28,7 @@ ) from posthog.utils import generate_cache_key, get_safe_cache -WebQueryNode = Union[ - WebOverviewQuery, WebTopClicksQuery, WebStatsTableQuery, WebGoalsQuery, WebExternalClicksTableQuery -] +WebQueryNode = Union[WebOverviewQuery, WebStatsTableQuery, WebGoalsQuery, WebExternalClicksTableQuery] class WebAnalyticsQueryRunner(QueryRunner, ABC): @@ -60,6 +57,35 @@ def property_filters_without_pathname( ) -> 
list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]]: return [p for p in self.query.properties if p.key != "$pathname"] + def period_aggregate(self, function_name, column_name, start, end, alias=None, params=None): + expr = ast.Call( + name=function_name + "If", + params=params, + args=[ + ast.Field(chain=[column_name]), + ast.Call( + name="and", + args=[ + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["start_timestamp"]), + right=start, + ), + ast.CompareOperation( + op=ast.CompareOperationOp.Lt, + left=ast.Field(chain=["start_timestamp"]), + right=end, + ), + ], + ), + ], + ) + + if alias is not None: + return ast.Alias(alias=alias, expr=expr) + + return expr + def session_where(self, include_previous_period: Optional[bool] = None): properties = [ parse_expr( diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py index cb17930afc752..24ba05dbc8096 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -98,7 +98,7 @@ def session_properties(self) -> ast.Expr: return property_to_expr(properties, team=self.team, scope="event") @cached_property - def conversion_goal_expr(self) -> ast.Expr: + def conversion_goal_expr(self) -> Optional[ast.Expr]: if isinstance(self.query.conversionGoal, ActionConversionGoal): action = Action.objects.get(pk=self.query.conversionGoal.actionId, team__project_id=self.team.project_id) return action_to_expr(action) @@ -109,10 +109,10 @@ def conversion_goal_expr(self) -> ast.Expr: right=ast.Constant(value=self.query.conversionGoal.customEventName), ) else: - return ast.Constant(value=None) + return None @cached_property - def conversion_person_id_expr(self) -> ast.Expr: + def conversion_person_id_expr(self) -> Optional[ast.Expr]: if self.conversion_goal_expr: return ast.Call( name="any", @@ -128,7 +128,7 @@ def conversion_person_id_expr(self) -> ast.Expr: ], ) else: - return ast.Constant(value=None) + return None @cached_property def pageview_count_expression(self) -> ast.Expr: @@ -147,11 +147,11 @@ def pageview_count_expression(self) -> ast.Expr: return ast.Call(name="count", args=[]) @cached_property - def conversion_count_expr(self) -> ast.Expr: + def conversion_count_expr(self) -> Optional[ast.Expr]: if self.conversion_goal_expr: return ast.Call(name="countIf", args=[self.conversion_goal_expr]) else: - return ast.Constant(value=None) + return None @cached_property def event_type_expr(self) -> ast.Expr: @@ -196,13 +196,12 @@ def inner_select(self) -> ast.SelectQuery: "date_range_end": end, "event_properties": self.event_properties(), "session_properties": self.session_properties(), - "conversion_person_id_expr": self.conversion_person_id_expr, "event_type_expr": self.event_type_expr, }, ) assert isinstance(parsed_select, ast.SelectQuery) - if self.query.conversionGoal: + if self.conversion_count_expr and self.conversion_person_id_expr: parsed_select.select.append(ast.Alias(alias="conversion_count", expr=self.conversion_count_expr)) parsed_select.select.append(ast.Alias(alias="conversion_person_id", expr=self.conversion_person_id_expr)) else: @@ -237,67 +236,17 @@ def outer_select(self) -> ast.SelectQuery: end = self.query_date_range.date_to_as_hogql() def current_period_aggregate(function_name, column_name, alias, params=None): - if self.query.compare: - return ast.Alias( - alias=alias, - expr=ast.Call( - name=function_name + "If", - params=params, - args=[ - 
ast.Field(chain=[column_name]), - ast.Call( - name="and", - args=[ - ast.CompareOperation( - op=ast.CompareOperationOp.GtEq, - left=ast.Field(chain=["start_timestamp"]), - right=mid, - ), - ast.CompareOperation( - op=ast.CompareOperationOp.Lt, - left=ast.Field(chain=["start_timestamp"]), - right=end, - ), - ], - ), - ], - ), - ) - else: - return ast.Alias( - alias=alias, expr=ast.Call(name=function_name, params=params, args=[ast.Field(chain=[column_name])]) - ) + if not self.query.compare: + return ast.Call(name=function_name, params=params, args=[ast.Field(chain=[column_name])]) + + return self.period_aggregate(function_name, column_name, mid, end, alias=alias, params=params) def previous_period_aggregate(function_name, column_name, alias, params=None): - if self.query.compare: - return ast.Alias( - alias=alias, - expr=ast.Call( - name=function_name + "If", - params=params, - args=[ - ast.Field(chain=[column_name]), - ast.Call( - name="and", - args=[ - ast.CompareOperation( - op=ast.CompareOperationOp.GtEq, - left=ast.Field(chain=["start_timestamp"]), - right=start, - ), - ast.CompareOperation( - op=ast.CompareOperationOp.Lt, - left=ast.Field(chain=["start_timestamp"]), - right=mid, - ), - ], - ), - ], - ), - ) - else: + if not self.query.compare: return ast.Alias(alias=alias, expr=ast.Constant(value=None)) + return self.period_aggregate(function_name, column_name, start, mid, alias=alias, params=params) + if self.query.conversionGoal: select = [ current_period_aggregate("uniq", "person_id", "unique_users"), diff --git a/posthog/management/commands/migrate_action_webhooks.py b/posthog/management/commands/migrate_action_webhooks.py index 3b307300c143f..c35a90f61232b 100644 --- a/posthog/management/commands/migrate_action_webhooks.py +++ b/posthog/management/commands/migrate_action_webhooks.py @@ -135,6 +135,7 @@ def convert_to_hog_function(action: Action, inert=False) -> Optional[HogFunction inputs=validate_inputs( webhook_template.inputs_schema, {"url": {"value": webhook_url}, "method": {"value": "POST"}, "body": {"value": body}}, + function_type="destination", ), inputs_schema=webhook_template.inputs_schema, template_id=webhook_template.id, diff --git a/posthog/migrations/0524_datawarehousejoin_configuration.py b/posthog/migrations/0524_datawarehousejoin_configuration.py new file mode 100644 index 0000000000000..a0335ea867acc --- /dev/null +++ b/posthog/migrations/0524_datawarehousejoin_configuration.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2024-11-30 09:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("posthog", "0523_errortrackingsymbolset_content_hash"), + ] + + operations = [ + migrations.AddField( + model_name="datawarehousejoin", + name="configuration", + field=models.JSONField(default=dict, null=True), + ), + ] diff --git a/posthog/migrations/0525_hog_function_transpiled.py b/posthog/migrations/0525_hog_function_transpiled.py new file mode 100644 index 0000000000000..5205eb627baca --- /dev/null +++ b/posthog/migrations/0525_hog_function_transpiled.py @@ -0,0 +1,41 @@ +# Generated by Django 4.2.15 on 2024-11-19 12:03 + +from django.db import migrations, models +from django.contrib.postgres.operations import AddIndexConcurrently + + +class Migration(migrations.Migration): + atomic = False # Added to support concurrent index creation + dependencies = [("posthog", "0524_datawarehousejoin_configuration")] + + operations = [ + migrations.AddField( + model_name="hogfunction", + name="transpiled", + 
field=models.TextField(blank=True, null=True), + ), + migrations.AlterField( + model_name="hogfunction", + name="type", + field=models.CharField( + blank=True, + choices=[ + ("destination", "Destination"), + ("site_destination", "Site Destination"), + ("site_app", "Site App"), + ("email", "Email"), + ("sms", "Sms"), + ("push", "Push"), + ("activity", "Activity"), + ("alert", "Alert"), + ("broadcast", "Broadcast"), + ], + max_length=24, + null=True, + ), + ), + AddIndexConcurrently( + model_name="hogfunction", + index=models.Index(fields=["type", "enabled", "team"], name="posthog_hog_type_6f8967_idx"), + ), + ] diff --git a/posthog/migrations/0524_externaldatajob_pipeline_version.py b/posthog/migrations/0526_externaldatajob_pipeline_version.py similarity index 91% rename from posthog/migrations/0524_externaldatajob_pipeline_version.py rename to posthog/migrations/0526_externaldatajob_pipeline_version.py index 5611845f95ef1..dbc665dc81cd1 100644 --- a/posthog/migrations/0524_externaldatajob_pipeline_version.py +++ b/posthog/migrations/0526_externaldatajob_pipeline_version.py @@ -5,7 +5,7 @@ class Migration(migrations.Migration): dependencies = [ - ("posthog", "0523_errortrackingsymbolset_content_hash"), + ("posthog", "0525_hog_function_transpiled"), ] operations = [ diff --git a/posthog/migrations/max_migration.txt b/posthog/migrations/max_migration.txt index 9c271eb62b6ab..ef15dc798ef26 100644 --- a/posthog/migrations/max_migration.txt +++ b/posthog/migrations/max_migration.txt @@ -1 +1 @@ -0524_externaldatajob_pipeline_version +0526_externaldatajob_pipeline_version diff --git a/posthog/models/hog_functions/hog_function.py b/posthog/models/hog_functions/hog_function.py index 980481e5288d6..48e3db90a9dcd 100644 --- a/posthog/models/hog_functions/hog_function.py +++ b/posthog/models/hog_functions/hog_function.py @@ -2,13 +2,15 @@ from typing import Optional from django.db import models -from django.db.models.signals import post_save +from django.db.models.signals import post_save, post_delete from django.dispatch.dispatcher import receiver import structlog from posthog.cdp.templates.hog_function_template import HogFunctionTemplate from posthog.helpers.encrypted_fields import EncryptedJSONStringField from posthog.models.action.action import Action +from posthog.models.plugin import sync_team_inject_web_apps +from posthog.models.signals import mutable_receiver from posthog.models.team.team import Team from posthog.models.utils import UUIDModel from posthog.plugins.plugin_server_api import ( @@ -33,6 +35,8 @@ class HogFunctionState(enum.Enum): class HogFunctionType(models.TextChoices): DESTINATION = "destination" + SITE_DESTINATION = "site_destination" + SITE_APP = "site_app" EMAIL = "email" SMS = "sms" PUSH = "push" @@ -43,9 +47,16 @@ class HogFunctionType(models.TextChoices): TYPES_THAT_RELOAD_PLUGIN_SERVER = (HogFunctionType.DESTINATION, HogFunctionType.EMAIL) TYPES_WITH_COMPILED_FILTERS = (HogFunctionType.DESTINATION,) +TYPES_WITH_TRANSPILED_FILTERS = (HogFunctionType.SITE_DESTINATION, HogFunctionType.SITE_APP) +TYPES_WITH_JAVASCRIPT_SOURCE = (HogFunctionType.SITE_DESTINATION, HogFunctionType.SITE_APP) class HogFunction(UUIDModel): + class Meta: + indexes = [ + models.Index(fields=["type", "enabled", "team"]), + ] + team = models.ForeignKey("Team", on_delete=models.CASCADE) name = models.CharField(max_length=400, null=True, blank=True) description = models.TextField(blank=True, default="") @@ -57,8 +68,14 @@ class HogFunction(UUIDModel): type = models.CharField(max_length=24, 
choices=HogFunctionType.choices, null=True, blank=True)
     icon_url = models.TextField(null=True, blank=True)
+
+    # Hog source, except for the "site_*" types, where it contains TypeScript source
     hog = models.TextField()
+    # Used when the source language is Hog (everything except the "site_*" types)
     bytecode = models.JSONField(null=True, blank=True)
+    # Transpiled JavaScript. Used with the "site_*" types
+    transpiled = models.TextField(null=True, blank=True)
+
     inputs_schema = models.JSONField(null=True)
     inputs = models.JSONField(null=True)
     encrypted_inputs: EncryptedJSONStringField = EncryptedJSONStringField(null=True, blank=True)
@@ -175,3 +192,14 @@ def team_saved(sender, instance: Team, created, **kwargs):
     from posthog.tasks.hog_functions import refresh_affected_hog_functions
 
     refresh_affected_hog_functions.delay(team_id=instance.id)
+
+
+@mutable_receiver([post_save, post_delete], sender=HogFunction)
+def team_inject_web_apps_changed(sender, instance, created=None, **kwargs):
+    try:
+        team = instance.team
+    except Team.DoesNotExist:
+        team = None
+    if team is not None:
+        # This controls whether /decide makes extra queries to get the site apps or not
+        sync_team_inject_web_apps(team)
diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py
index d32a18f974939..75362578e366c 100644
--- a/posthog/models/plugin.py
+++ b/posthog/models/plugin.py
@@ -22,7 +22,7 @@
 from posthog.models.team import Team
 from posthog.plugins.access import can_configure_plugins, can_install_plugins
 from posthog.plugins.plugin_server_api import populate_plugin_capabilities_on_workers, reload_plugins_on_workers
-from posthog.plugins.site import get_decide_site_apps
+from posthog.plugins.site import get_decide_site_apps, get_decide_site_functions
 from posthog.plugins.utils import (
     download_plugin_archive,
     extract_plugin_code,
@@ -303,6 +303,10 @@ class PluginLogEntryType(StrEnum):
     ERROR = "ERROR"
 
 
+class TranspilerError(Exception):
+    pass
+
+
 def transpile(input_string: str, type: Literal["site", "frontend"] = "site") -> Optional[str]:
     from posthog.settings.base_variables import BASE_DIR
 
@@ -317,7 +321,7 @@ def transpile(input_string: str, type: Literal["site", "frontend"] = "site") ->
 
     if process.returncode != 0:
         error = stderr.decode()
-        raise Exception(error)
+        raise TranspilerError(error)
 
     return stdout.decode()
 
@@ -584,7 +588,7 @@ def plugin_config_reload_needed(sender, instance, created=None, **kwargs):
 
 
 def sync_team_inject_web_apps(team: Team):
-    inject_web_apps = len(get_decide_site_apps(team)) > 0
+    inject_web_apps = len(get_decide_site_apps(team)) > 0 or len(get_decide_site_functions(team)) > 0
     if inject_web_apps != team.inject_web_apps:
         team.inject_web_apps = inject_web_apps
         team.save(update_fields=["inject_web_apps"])
diff --git a/posthog/models/test/test_hog_function.py b/posthog/models/test/test_hog_function.py
index 7cbc441e083df..bfbfa7e6f74cd 100644
--- a/posthog/models/test/test_hog_function.py
+++ b/posthog/models/test/test_hog_function.py
@@ -4,7 +4,7 @@
 from hogvm.python.operation import HOGQL_BYTECODE_VERSION
 
 from posthog.models.action.action import Action
-from posthog.models.hog_functions.hog_function import HogFunction
+from posthog.models.hog_functions.hog_function import HogFunction, HogFunctionType
 from posthog.models.user import User
 from posthog.test.base import QueryMatchingTest
 
@@ -34,13 +34,14 @@ def test_hog_function_team_no_filters_compilation(self):
         assert json_filters["bytecode"] == ["_H", HOGQL_BYTECODE_VERSION, 29]  # TRUE
 
     def 
test_hog_function_filters_compilation(self): + action = Action.objects.create(team=self.team, name="Test Action") item = HogFunction.objects.create( name="Test", - type="destination", + type=HogFunctionType.DESTINATION, team=self.team, filters={ "events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], - "actions": [{"id": "9", "name": "Test Action", "type": "actions", "order": 1}], + "actions": [{"id": str(action.pk), "name": "Test Action", "type": "actions", "order": 1}], "filter_test_accounts": True, }, ) @@ -49,7 +50,7 @@ def test_hog_function_filters_compilation(self): json_filters = to_dict(item.filters) assert json_filters == { "events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], - "actions": [{"id": "9", "name": "Test Action", "type": "actions", "order": 1}], + "actions": [{"id": str(action.pk), "name": "Test Action", "type": "actions", "order": 1}], "filter_test_accounts": True, "bytecode": [ "_H", @@ -103,11 +104,7 @@ def test_hog_function_filters_compilation(self): 35, 33, 1, - 33, - 2, - 33, - 1, - 11, + 29, 3, 2, 4, diff --git a/posthog/models/test/test_team_model.py b/posthog/models/test/test_team_model.py index 102eb62449a71..82cd8140f9cfd 100644 --- a/posthog/models/test/test_team_model.py +++ b/posthog/models/test/test_team_model.py @@ -12,7 +12,7 @@ def test_all_users_with_access_simple_org_membership(self): all_user_with_access_ids = list(self.team.all_users_with_access().values_list("id", flat=True)) - assert all_user_with_access_ids == [self.user.id, another_user.id] + assert sorted(all_user_with_access_ids) == sorted([self.user.id, another_user.id]) def test_all_users_with_access_simple_org_membership_and_redundant_team_one(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER @@ -22,7 +22,9 @@ def test_all_users_with_access_simple_org_membership_and_redundant_team_one(self all_user_with_access_ids = list(self.team.all_users_with_access().values_list("id", flat=True)) - assert all_user_with_access_ids == [self.user.id, another_user.id] # self.user should only be listed once + assert sorted(all_user_with_access_ids) == sorted( + [self.user.id, another_user.id] + ) # self.user should only be listed once def test_all_users_with_access_while_access_control_org_membership(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN diff --git a/posthog/ph_client.py b/posthog/ph_client.py index 9775ebd9a0334..4755f9df36569 100644 --- a/posthog/ph_client.py +++ b/posthog/ph_client.py @@ -1,5 +1,14 @@ from posthog.utils import get_instance_region from posthog.cloud_utils import is_cloud +from typing import Any +import structlog + +from contextlib import contextmanager + +PH_US_API_KEY = "sTMFPsFhdP1Ssg" +PH_US_HOST = "https://us.i.posthog.com" + +logger = structlog.get_logger(__name__) def get_ph_client(): @@ -16,8 +25,8 @@ def get_ph_client(): api_key = "phc_dZ4GK1LRjhB97XozMSkEwPXx7OVANaJEwLErkY1phUF" host = "https://eu.i.posthog.com" elif region == "US": - api_key = "sTMFPsFhdP1Ssg" - host = "https://us.i.posthog.com" + api_key = PH_US_API_KEY + host = PH_US_HOST if not api_key: return @@ -25,3 +34,24 @@ def get_ph_client(): ph_client = Posthog(api_key, host=host) return ph_client + + +@contextmanager +def ph_us_client(): + from posthoganalytics import Posthog + + ph_client = Posthog(PH_US_API_KEY, host=PH_US_HOST) + + def capture_ph_event(*args: Any, **kwargs: Any) -> None: + if is_cloud(): + properties = kwargs.get("properties", {}) + properties["region"] = 
get_instance_region() + kwargs["properties"] = properties + + ph_client.capture(*args, **kwargs) + else: + logger.info("Captured event in US region", args, kwargs) + + yield capture_ph_event + + ph_client.shutdown() diff --git a/posthog/plugins/site.py b/posthog/plugins/site.py index bf1d7e376c4b1..af59c2b5dfef6 100644 --- a/posthog/plugins/site.py +++ b/posthog/plugins/site.py @@ -19,6 +19,7 @@ class WebJsSource: class WebJsUrl: id: int url: str + type: str def get_transpiled_site_source(id: int, token: str) -> Optional[WebJsSource]: @@ -73,7 +74,35 @@ def site_app_url(source: tuple) -> str: hash = md5(f"{source[2]}-{source[3]}-{source[4]}".encode()).hexdigest() return f"/site_app/{source[0]}/{source[1]}/{hash}/" - return [asdict(WebJsUrl(source[0], site_app_url(source))) for source in sources] + return [asdict(WebJsUrl(source[0], site_app_url(source), "site_app")) for source in sources] + + +def get_decide_site_functions(team: "Team", using_database: str = "default") -> list[dict]: + from posthog.models import HogFunction + + sources = ( + HogFunction.objects.db_manager(using_database) + .filter( + team=team, + enabled=True, + type__in=("site_destination", "site_app"), + transpiled__isnull=False, + ) + .values_list( + "id", + "updated_at", + "type", + ) + .all() + ) + + def site_function_url(source: tuple) -> str: + hash = md5(str(source[1]).encode()).hexdigest() + return f"/site_function/{source[0]}/{hash}/" + + return [ + asdict(WebJsUrl(source[0], site_function_url(source), source[2] or "site_destination")) for source in sources + ] def get_site_config_from_schema(config_schema: Optional[list[dict]], config: Optional[dict]): diff --git a/posthog/queries/test/__snapshots__/test_trends.ambr b/posthog/queries/test/__snapshots__/test_trends.ambr index ad6da12dcdceb..9e2cc7eda3a1a 100644 --- a/posthog/queries/test/__snapshots__/test_trends.ambr +++ b/posthog/queries/test/__snapshots__/test_trends.ambr @@ -865,6 +865,42 @@ # --- # name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.1 ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.10 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.11 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.12 + ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, count(*) as count @@ -879,7 +915,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.13 ''' SELECT groupArray(day_start) as date, @@ -931,7 +967,7 @@ ORDER BY breakdown_value ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 +# name: 
TestTrends.test_dau_with_breakdown_filtering_with_sampling.14 ''' SELECT replaceRegexpAll(JSONExtractRaw(properties, '$some_property'), '^"|"$', '') AS value, @@ -947,7 +983,7 @@ OFFSET 0 ''' # --- -# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.15 ''' SELECT groupArray(day_start) as date, @@ -999,6 +1035,102 @@ ORDER BY breakdown_value ''' # --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.2 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.3 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.4 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.5 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.6 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.7 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.8 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- +# name: TestTrends.test_dau_with_breakdown_filtering_with_sampling.9 + ''' + /* celery:posthog.tasks.tasks.sync_insight_caching_state */ + SELECT team_id, + date_diff('second', max(timestamp), now()) AS age + FROM events + WHERE timestamp > date_sub(DAY, 3, now()) + AND timestamp < now() + GROUP BY team_id + ORDER BY age; + ''' +# --- # name: TestTrends.test_filter_events_by_precalculated_cohort ''' diff --git a/posthog/schema.py b/posthog/schema.py index 93b010344e9bd..a60f6ae8ac5ab 100644 --- a/posthog/schema.py +++ b/posthog/schema.py @@ -763,7 +763,7 @@ class ErrorTrackingIssue(BaseModel): volume: Optional[Any] = None -class Order(StrEnum): +class OrderBy(StrEnum): LAST_SEEN = "last_seen" FIRST_SEEN = "first_seen" OCCURRENCES = 
"occurrences" @@ -1201,7 +1201,6 @@ class NodeKind(StrEnum): INSIGHT_ACTORS_QUERY_OPTIONS = "InsightActorsQueryOptions" FUNNEL_CORRELATION_QUERY = "FunnelCorrelationQuery" WEB_OVERVIEW_QUERY = "WebOverviewQuery" - WEB_TOP_CLICKS_QUERY = "WebTopClicksQuery" WEB_STATS_TABLE_QUERY = "WebStatsTableQuery" WEB_EXTERNAL_CLICKS_TABLE_QUERY = "WebExternalClicksTableQuery" WEB_GOALS_QUERY = "WebGoalsQuery" @@ -1357,7 +1356,7 @@ class QueryResponseAlternative7(BaseModel): warnings: list[HogQLNotice] -class QueryResponseAlternative38(BaseModel): +class QueryResponseAlternative36(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2622,58 +2621,7 @@ class QueryResponseAlternative10(BaseModel): types: Optional[list] = None -class QueryResponseAlternative12(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." - ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - class QueryResponseAlternative13(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hasMore: Optional[bool] = None - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - limit: Optional[int] = None - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - offset: Optional[int] = None - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." 
- ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - -class QueryResponseAlternative14(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2699,7 +2647,7 @@ class QueryResponseAlternative14(BaseModel): types: Optional[list] = None -class QueryResponseAlternative15(BaseModel): +class QueryResponseAlternative14(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2724,7 +2672,7 @@ class QueryResponseAlternative15(BaseModel): ) -class QueryResponseAlternative18(BaseModel): +class QueryResponseAlternative17(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2750,7 +2698,7 @@ class QueryResponseAlternative18(BaseModel): types: list[str] -class QueryResponseAlternative19(BaseModel): +class QueryResponseAlternative18(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2777,7 +2725,7 @@ class QueryResponseAlternative19(BaseModel): types: list[str] -class QueryResponseAlternative20(BaseModel): +class QueryResponseAlternative19(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2807,7 +2755,7 @@ class QueryResponseAlternative20(BaseModel): types: Optional[list] = Field(default=None, description="Types of returned columns") -class QueryResponseAlternative22(BaseModel): +class QueryResponseAlternative21(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2835,57 +2783,6 @@ class QueryResponseAlternative22(BaseModel): class QueryResponseAlternative24(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." - ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - -class QueryResponseAlternative25(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hasMore: Optional[bool] = None - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - limit: Optional[int] = None - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - offset: Optional[int] = None - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." 
- ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - -class QueryResponseAlternative26(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2911,7 +2808,7 @@ class QueryResponseAlternative26(BaseModel): types: Optional[list] = None -class QueryResponseAlternative27(BaseModel): +class QueryResponseAlternative25(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2936,7 +2833,7 @@ class QueryResponseAlternative27(BaseModel): ) -class QueryResponseAlternative30(BaseModel): +class QueryResponseAlternative28(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2958,7 +2855,7 @@ class QueryResponseAlternative30(BaseModel): ) -class QueryResponseAlternative31(BaseModel): +class QueryResponseAlternative29(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -2980,7 +2877,7 @@ class QueryResponseAlternative31(BaseModel): ) -class QueryResponseAlternative33(BaseModel): +class QueryResponseAlternative31(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3001,7 +2898,7 @@ class QueryResponseAlternative33(BaseModel): ) -class QueryResponseAlternative36(BaseModel): +class QueryResponseAlternative34(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3027,7 +2924,7 @@ class QueryResponseAlternative36(BaseModel): types: Optional[list] = None -class QueryResponseAlternative39(BaseModel): +class QueryResponseAlternative37(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3048,7 +2945,7 @@ class QueryResponseAlternative39(BaseModel): ) -class QueryResponseAlternative40(BaseModel): +class QueryResponseAlternative38(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3069,7 +2966,7 @@ class QueryResponseAlternative40(BaseModel): ) -class QueryResponseAlternative41(BaseModel): +class QueryResponseAlternative39(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -3560,30 +3457,6 @@ class WebStatsTableQueryResponse(BaseModel): types: Optional[list] = None -class WebTopClicksQueryResponse(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." 
- ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - class ActorsPropertyTaxonomyQueryResponse(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4537,39 +4410,6 @@ class CachedWebStatsTableQueryResponse(BaseModel): types: Optional[list] = None -class CachedWebTopClicksQueryResponse(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - cache_key: str - cache_target_age: Optional[AwareDatetime] = None - calculation_trigger: Optional[str] = Field( - default=None, description="What triggered the calculation of the query, leave empty if user/immediate" - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - is_cached: bool - last_refresh: AwareDatetime - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - next_allowed_client_refresh: AwareDatetime - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." - ) - results: list - samplingRate: Optional[SamplingRate] = None - timezone: str - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - class DashboardFilter(BaseModel): model_config = ConfigDict( extra="forbid", @@ -4731,58 +4571,7 @@ class Response4(BaseModel): types: Optional[list] = None -class Response6(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." - ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - class Response7(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - columns: Optional[list] = None - error: Optional[str] = Field( - default=None, - description="Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", - ) - hasMore: Optional[bool] = None - hogql: Optional[str] = Field(default=None, description="Generated HogQL query.") - limit: Optional[int] = None - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - offset: Optional[int] = None - query_status: Optional[QueryStatus] = Field( - default=None, description="Query status indicates whether next to the provided data, a query is still running." 
- ) - results: list - samplingRate: Optional[SamplingRate] = None - timings: Optional[list[QueryTiming]] = Field( - default=None, description="Measured timings for different parts of the query generation process" - ) - types: Optional[list] = None - - -class Response8(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -4808,7 +4597,7 @@ class Response8(BaseModel): types: Optional[list] = None -class Response9(BaseModel): +class Response8(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -5498,7 +5287,7 @@ class QueryResponseAlternative9(BaseModel): ) -class QueryResponseAlternative32(BaseModel): +class QueryResponseAlternative30(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -5680,23 +5469,6 @@ class WebStatsTableQuery(BaseModel): useSessionsTable: Optional[bool] = None -class WebTopClicksQuery(BaseModel): - model_config = ConfigDict( - extra="forbid", - ) - conversionGoal: Optional[Union[ActionConversionGoal, CustomEventConversionGoal]] = None - dateRange: Optional[DateRange] = None - filterTestAccounts: Optional[bool] = None - kind: Literal["WebTopClicksQuery"] = "WebTopClicksQuery" - modifiers: Optional[HogQLQueryModifiers] = Field( - default=None, description="Modifiers used when performing the query" - ) - properties: list[Union[EventPropertyFilter, PersonPropertyFilter, SessionPropertyFilter]] - response: Optional[WebTopClicksQueryResponse] = None - sampling: Optional[Sampling] = None - useSessionsTable: Optional[bool] = None - - class ActionsNode(BaseModel): model_config = ConfigDict( extra="forbid", @@ -5786,6 +5558,7 @@ class AnyResponseType( HogQLAutocompleteResponse, Any, EventsQueryResponse, + ErrorTrackingQueryResponse, ] ] ): @@ -5797,6 +5570,7 @@ class AnyResponseType( HogQLAutocompleteResponse, Any, EventsQueryResponse, + ErrorTrackingQueryResponse, ] @@ -5888,7 +5662,7 @@ class RecordingsQuery(BaseModel): ) actions: Optional[list[dict[str, Any]]] = None console_log_filters: Optional[list[LogEntryPropertyFilter]] = None - date_from: Optional[str] = None + date_from: Optional[str] = "-3d" date_to: Optional[str] = None events: Optional[list[dict[str, Any]]] = None filter_test_accounts: Optional[bool] = None @@ -5917,8 +5691,8 @@ class RecordingsQuery(BaseModel): default=None, description="Modifiers used when performing the query" ) offset: Optional[int] = None - operand: Optional[FilterLogicalOperator] = None - order: Optional[RecordingOrder] = None + operand: Optional[FilterLogicalOperator] = FilterLogicalOperator.AND_ + order: Optional[RecordingOrder] = RecordingOrder.START_TIME person_uuid: Optional[str] = None properties: Optional[ list[ @@ -6118,7 +5892,7 @@ class CachedExperimentTrendsQueryResponse(BaseModel): variants: list[ExperimentVariantTrendsBaseStats] -class Response11(BaseModel): +class Response10(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6148,7 +5922,8 @@ class ErrorTrackingQuery(BaseModel): modifiers: Optional[HogQLQueryModifiers] = Field( default=None, description="Modifiers used when performing the query" ) - order: Optional[Order] = None + offset: Optional[int] = None + orderBy: Optional[OrderBy] = None response: Optional[ErrorTrackingQueryResponse] = None searchQuery: Optional[str] = None select: Optional[list[str]] = None @@ -6534,7 +6309,7 @@ class LifecycleQuery(BaseModel): ) -class QueryResponseAlternative16(BaseModel): +class QueryResponseAlternative15(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6549,7 +6324,7 @@ class QueryResponseAlternative16(BaseModel): variants: 
list[ExperimentVariantFunnelsBaseStats] -class QueryResponseAlternative17(BaseModel): +class QueryResponseAlternative16(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6565,7 +6340,7 @@ class QueryResponseAlternative17(BaseModel): variants: list[ExperimentVariantTrendsBaseStats] -class QueryResponseAlternative28(BaseModel): +class QueryResponseAlternative26(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6580,7 +6355,7 @@ class QueryResponseAlternative28(BaseModel): variants: list[ExperimentVariantFunnelsBaseStats] -class QueryResponseAlternative29(BaseModel): +class QueryResponseAlternative27(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6596,7 +6371,7 @@ class QueryResponseAlternative29(BaseModel): variants: list[ExperimentVariantTrendsBaseStats] -class QueryResponseAlternative37(BaseModel): +class QueryResponseAlternative35(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -6626,17 +6401,15 @@ class QueryResponseAlternative( QueryResponseAlternative8, QueryResponseAlternative9, QueryResponseAlternative10, - QueryResponseAlternative12, QueryResponseAlternative13, QueryResponseAlternative14, QueryResponseAlternative15, QueryResponseAlternative16, - QueryResponseAlternative17, Any, + QueryResponseAlternative17, QueryResponseAlternative18, QueryResponseAlternative19, - QueryResponseAlternative20, - QueryResponseAlternative22, + QueryResponseAlternative21, QueryResponseAlternative24, QueryResponseAlternative25, QueryResponseAlternative26, @@ -6645,14 +6418,12 @@ class QueryResponseAlternative( QueryResponseAlternative29, QueryResponseAlternative30, QueryResponseAlternative31, - QueryResponseAlternative32, - QueryResponseAlternative33, + QueryResponseAlternative34, + QueryResponseAlternative35, QueryResponseAlternative36, QueryResponseAlternative37, QueryResponseAlternative38, QueryResponseAlternative39, - QueryResponseAlternative40, - QueryResponseAlternative41, ] ] ): @@ -6668,17 +6439,15 @@ class QueryResponseAlternative( QueryResponseAlternative8, QueryResponseAlternative9, QueryResponseAlternative10, - QueryResponseAlternative12, QueryResponseAlternative13, QueryResponseAlternative14, QueryResponseAlternative15, QueryResponseAlternative16, - QueryResponseAlternative17, Any, + QueryResponseAlternative17, QueryResponseAlternative18, QueryResponseAlternative19, - QueryResponseAlternative20, - QueryResponseAlternative22, + QueryResponseAlternative21, QueryResponseAlternative24, QueryResponseAlternative25, QueryResponseAlternative26, @@ -6687,14 +6456,12 @@ class QueryResponseAlternative( QueryResponseAlternative29, QueryResponseAlternative30, QueryResponseAlternative31, - QueryResponseAlternative32, - QueryResponseAlternative33, + QueryResponseAlternative34, + QueryResponseAlternative35, QueryResponseAlternative36, QueryResponseAlternative37, QueryResponseAlternative38, QueryResponseAlternative39, - QueryResponseAlternative40, - QueryResponseAlternative41, ] @@ -6725,7 +6492,7 @@ class CachedExperimentFunnelsQueryResponse(BaseModel): variants: list[ExperimentVariantFunnelsBaseStats] -class Response10(BaseModel): +class Response9(BaseModel): model_config = ConfigDict( extra="forbid", ) @@ -7069,12 +6836,10 @@ class DataTableNode(BaseModel): Response2, Response3, Response4, - Response6, Response7, Response8, Response9, Response10, - Response11, ] ] = None showActions: Optional[bool] = Field(default=None, description="Show the kebab menu at the end of the row") @@ -7112,7 +6877,6 @@ class DataTableNode(BaseModel): WebOverviewQuery, 
WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, @@ -7152,12 +6916,12 @@ class HogQLAutocomplete(BaseModel): WebOverviewQuery, WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, ExperimentFunnelsQuery, ExperimentTrendsQuery, + RecordingsQuery, ] ] = Field(default=None, description="Query in whose context to validate.") startPosition: int = Field(..., description="Start position of the editor word") @@ -7196,12 +6960,12 @@ class HogQLMetadata(BaseModel): WebOverviewQuery, WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, ExperimentFunnelsQuery, ExperimentTrendsQuery, + RecordingsQuery, ] ] = Field( default=None, @@ -7238,7 +7002,6 @@ class QueryRequest(BaseModel): WebOverviewQuery, WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, @@ -7306,7 +7069,6 @@ class QuerySchemaRoot( WebOverviewQuery, WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, @@ -7348,7 +7110,6 @@ class QuerySchemaRoot( WebOverviewQuery, WebStatsTableQuery, WebExternalClicksTableQuery, - WebTopClicksQuery, WebGoalsQuery, SessionAttributionExplorerQuery, ErrorTrackingQuery, diff --git a/posthog/session_recordings/queries/session_recording_list_from_filters.py b/posthog/session_recordings/queries/session_recording_list_from_filters.py index b9ea443d3c9e5..38522ae4d18ce 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/session_recording_list_from_filters.py @@ -264,18 +264,9 @@ def _where_predicates(self) -> Union[ast.And, ast.Or]: console_logs_subquery = ast.SelectQuery( select=[ast.Field(chain=["log_source_id"])], select_from=ast.JoinExpr(table=ast.Field(chain=["console_logs_log_entries"])), - where=ast.And( + where=self._filter.ast_operand( exprs=[ - self._filter.ast_operand( - exprs=[ - property_to_expr(self._filter.console_log_filters, team=self._team), - ] - ), - ast.CompareOperation( - op=ast.CompareOperationOp.Eq, - left=ast.Field(chain=["log_source"]), - right=ast.Constant(value="session_replay"), - ), + property_to_expr(self._filter.console_log_filters, team=self._team), ] ), ) diff --git a/posthog/session_recordings/queries/session_recording_list_from_query.py b/posthog/session_recordings/queries/session_recording_list_from_query.py new file mode 100644 index 0000000000000..fe08fb85590f2 --- /dev/null +++ b/posthog/session_recordings/queries/session_recording_list_from_query.py @@ -0,0 +1,787 @@ +import re +from typing import Any, NamedTuple, cast, Optional, Union +from datetime import datetime, timedelta, UTC + +import posthoganalytics + +from posthog.constants import PropertyOperatorType +from posthog.hogql import ast +from posthog.hogql.ast import CompareOperation +from posthog.hogql.constants import HogQLGlobalSettings +from posthog.hogql.parser import parse_select +from posthog.hogql.property import property_to_expr, action_to_expr +from posthog.hogql.query import execute_hogql_query +from posthog.hogql_queries.insights.paginators import HogQLHasMorePaginator +from posthog.hogql_queries.legacy_compatibility.filter_to_query import MathAvailability, legacy_entity_to_node +from 
posthog.hogql_queries.utils.query_date_range import QueryDateRange +from posthog.models import Team, Entity, Action +from posthog.models.filters.mixins.utils import cached_property +from posthog.schema import ( + QueryTiming, + HogQLQueryModifiers, + PersonsOnEventsMode, + RecordingsQuery, + DateRange, + NodeKind, + EventsNode, + ActionsNode, + PropertyGroupFilterValue, + FilterLogicalOperator, + RecordingOrder, + PersonPropertyFilter, + EventPropertyFilter, + GroupPropertyFilter, + HogQLPropertyFilter, +) +from posthog.session_recordings.queries.session_replay_events import ttl_days + +import structlog + +from posthog.types import AnyPropertyFilter + +logger = structlog.get_logger(__name__) + + +def is_event_property(p: AnyPropertyFilter) -> bool: + p_type = getattr(p, "type", None) + p_key = getattr(p, "key", "") + return p_type == "event" or (p_type == "hogql" and bool(re.search(r"(?<!person\.)properties\.", p_key))) + + +def is_person_property(p: AnyPropertyFilter) -> bool: + p_type = getattr(p, "type", None) + p_key = getattr(p, "key", "") + return p_type == "person" or (p_type == "hogql" and "person.properties" in p_key) + + +def is_group_property(p: AnyPropertyFilter) -> bool: + p_type = getattr(p, "type", None) + return p_type == "group" + + +def is_cohort_property(p: AnyPropertyFilter) -> bool: + p_type = getattr(p, "type", None) + return bool(p_type and "cohort" in p_type) + + +class SessionRecordingQueryResult(NamedTuple): + results: list + has_more_recording: bool + timings: list[QueryTiming] | None = None + + +class UnexpectedQueryProperties(Exception): + def __init__(self, remaining_properties: list[AnyPropertyFilter] | None): + self.remaining_properties = remaining_properties + super().__init__(f"Unexpected properties in query: {remaining_properties}") + + +def _strip_person_and_event_and_cohort_properties( + properties: list[AnyPropertyFilter] | None, +) -> list[AnyPropertyFilter] | None: + if not properties: + return None + + properties_to_keep = [ + g + for g in properties + if not is_event_property(g) + and not is_person_property(g) + and not is_group_property(g) + and not is_cohort_property(g) + ] + + return properties_to_keep + + +class SessionRecordingListFromQuery: + SESSION_RECORDINGS_DEFAULT_LIMIT = 50 + + _team: Team + _query: RecordingsQuery + + BASE_QUERY: str = """ + SELECT s.session_id, + any(s.team_id), + any(s.distinct_id), + min(s.min_first_timestamp) as start_time, + max(s.max_last_timestamp) as end_time, + dateDiff('SECOND', start_time, end_time) as duration, + argMinMerge(s.first_url) as first_url, + sum(s.click_count) as click_count, + sum(s.keypress_count) as keypress_count, + sum(s.mouse_activity_count) as mouse_activity_count, + sum(s.active_milliseconds)/1000 as active_seconds, + (duration - active_seconds) as inactive_seconds, + sum(s.console_log_count) as console_log_count, + sum(s.console_warn_count) as console_warn_count, + sum(s.console_error_count) as console_error_count, + {ongoing_selection}, + round(( + ((sum(s.active_milliseconds) / 1000 + sum(s.click_count) + sum(s.keypress_count) + sum(s.console_error_count))) -- intent + / + ((sum(s.mouse_activity_count) + dateDiff('SECOND', start_time, end_time) + sum(s.console_error_count) + sum(s.console_log_count) + sum(s.console_warn_count))) + * 100 + ), 2) as activity_score + FROM raw_session_replay_events s + WHERE {where_predicates} + GROUP BY session_id + HAVING {having_predicates} + ORDER BY {order_by} DESC + """ + + @staticmethod + def _data_to_return(results: list[Any] | None) -> list[dict[str, Any]]: + default_columns = [ + "session_id", + "team_id", + "distinct_id", + 
"start_time", + "end_time", + "duration", + "first_url", + "click_count", + "keypress_count", + "mouse_activity_count", + "active_seconds", + "inactive_seconds", + "console_log_count", + "console_warn_count", + "console_error_count", + "ongoing", + "activity_score", + ] + + return [ + { + **dict(zip(default_columns, row[: len(default_columns)])), + } + for row in results or [] + ] + + def __init__( + self, + team: Team, + query: RecordingsQuery, + hogql_query_modifiers: Optional[HogQLQueryModifiers], + **_, + ): + self._team = team + + self._query = query + if self._query.filter_test_accounts: + self._query.properties = self._query.properties or [] + self._query.properties += self._test_account_filters + + self._paginator = HogQLHasMorePaginator( + limit=query.limit or self.SESSION_RECORDINGS_DEFAULT_LIMIT, offset=query.offset or 0 + ) + self._hogql_query_modifiers = hogql_query_modifiers + + @cached_property + def _test_account_filters(self) -> list[AnyPropertyFilter]: + prop_filters: list[AnyPropertyFilter] = [] + for prop in self._team.test_account_filters: + match prop["type"]: + case "person": + prop_filters.append(PersonPropertyFilter(**prop)) + case "event": + prop_filters.append(EventPropertyFilter(**prop)) + case "group": + prop_filters.append(GroupPropertyFilter(**prop)) + case "hogql": + prop_filters.append(HogQLPropertyFilter(**prop)) + + return prop_filters + + @property + def ttl_days(self): + return ttl_days(self._team) + + def run(self) -> SessionRecordingQueryResult: + query = self.get_query() + + paginated_response = self._paginator.execute_hogql_query( + # TODO I guess the paginator needs to know how to handle union queries or all callers are supposed to collapse them or .... 🤷 + query=cast(ast.SelectQuery, query), + team=self._team, + query_type="SessionRecordingListQuery", + modifiers=self._hogql_query_modifiers, + settings=HogQLGlobalSettings(allow_experimental_analyzer=False), # This needs to be turned on eventually + ) + + return SessionRecordingQueryResult( + results=(self._data_to_return(self._paginator.results)), + has_more_recording=self._paginator.has_more(), + timings=paginated_response.timings, + ) + + def get_query(self): + return parse_select( + self.BASE_QUERY, + { + # Check if the most recent _timestamp is within five minutes of the current time + # proxy for a live session + "ongoing_selection": ast.Alias( + alias="ongoing", + expr=ast.CompareOperation( + left=ast.Call(name="max", args=[ast.Field(chain=["s", "_timestamp"])]), + right=ast.Constant( + # provided in a placeholder, so we can pass now from python to make tests easier 🙈 + value=datetime.now(UTC) - timedelta(minutes=5), + ), + op=ast.CompareOperationOp.GtEq, + ), + ), + "order_by": self._order_by_clause(), + "where_predicates": self._where_predicates(), + "having_predicates": self._having_predicates() or ast.Constant(value=True), + }, + ) + + def _order_by_clause(self) -> ast.Field: + # KLUDGE: we only need a default here because mypy is silly + order_by = self._query.order.value if self._query.order else RecordingOrder.START_TIME + return ast.Field(chain=[order_by]) + + @cached_property + def query_date_range(self): + return QueryDateRange( + date_range=DateRange(date_from=self._query.date_from, date_to=self._query.date_to), + team=self._team, + interval=None, + now=datetime.now(), + ) + + def _where_predicates(self) -> Union[ast.And, ast.Or]: + exprs: list[ast.Expr] = [ + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["s", "min_first_timestamp"]), + 
right=ast.Constant(value=datetime.now(UTC) - timedelta(days=self.ttl_days)), + ) + ] + + person_id_compare_operation = PersonsIdCompareOperation(self._team, self._query, self.ttl_days).get_operation() + if person_id_compare_operation: + exprs.append(person_id_compare_operation) + + # we check for session_ids type not for truthiness since we want to allow empty lists + if isinstance(self._query.session_ids, list): + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["session_id"]), + right=ast.Constant(value=self._query.session_ids), + ) + ) + + query_date_from = self.query_date_range.date_from() + if query_date_from: + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["s", "min_first_timestamp"]), + right=ast.Constant(value=query_date_from), + ) + ) + + query_date_to = self.query_date_range.date_to() + if query_date_to: + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.LtEq, + left=ast.Field(chain=["s", "min_first_timestamp"]), + right=ast.Constant(value=query_date_to), + ) + ) + + optional_exprs: list[ast.Expr] = [] + + # if in PoE mode then we should be pushing person property queries into here + events_sub_query = ReplayFiltersEventsSubQuery(self._team, self._query).get_query_for_session_id_matching() + if events_sub_query: + optional_exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["s", "session_id"]), + right=events_sub_query, + ) + ) + + # we want to avoid a join to persons since we don't ever need to select from them, + # so we create our own persons sub query here + # if PoE mode is on then this will be handled in the events subquery, and we don't need to do anything here + person_subquery = PersonsPropertiesSubQuery(self._team, self._query, self.ttl_days).get_query() + if person_subquery: + optional_exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["s", "distinct_id"]), + right=person_subquery, + ) + ) + + cohort_subquery = CohortPropertyGroupsSubQuery(self._team, self._query, self.ttl_days).get_query() + if cohort_subquery: + optional_exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["s", "distinct_id"]), + right=cohort_subquery, + ) + ) + + remaining_properties = _strip_person_and_event_and_cohort_properties(self._query.properties) + if remaining_properties: + posthoganalytics.capture_exception(UnexpectedQueryProperties(remaining_properties)) + optional_exprs.append(property_to_expr(remaining_properties, team=self._team, scope="replay")) + + if self._query.console_log_filters: + console_logs_subquery = ast.SelectQuery( + select=[ast.Field(chain=["log_source_id"])], + select_from=ast.JoinExpr(table=ast.Field(chain=["console_logs_log_entries"])), + where=property_to_expr( + # convert to a property group so we can insert the correct operand + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND_ + if self.property_operand == "AND" + else FilterLogicalOperator.OR_, + values=self._query.console_log_filters, + ), + team=self._team, + ), + ) + + optional_exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["session_id"]), + right=console_logs_subquery, + ) + ) + + if optional_exprs: + exprs.append(self.ast_operand(exprs=optional_exprs)) + + return ast.And(exprs=exprs) + + def _having_predicates(self) -> ast.Expr | None: + return ( + property_to_expr(self._query.having_predicates, team=self._team, 
scope="replay") + if self._query.having_predicates + else None + ) + + @cached_property + def property_operand(self): + return PropertyOperatorType.AND if self._query.operand == "AND" else PropertyOperatorType.OR + + @cached_property + def ast_operand(self) -> type[Union[ast.And, ast.Or]]: + return ast.And if self.property_operand == "AND" else ast.Or + + +def poe_is_active(team: Team) -> bool: + return team.person_on_events_mode is not None and team.person_on_events_mode != PersonsOnEventsMode.DISABLED + + +class PersonsPropertiesSubQuery: + _team: Team + _query: RecordingsQuery + _ttl_days: int + + def __init__(self, team: Team, query: RecordingsQuery, ttl_days: int): + self._team = team + self._query = query + self._ttl_days = ttl_days + + def get_query(self) -> ast.SelectQuery | ast.SelectSetQuery | None: + if self.person_properties and not poe_is_active(self._team): + return parse_select( + """ + SELECT distinct_id + FROM person_distinct_ids + WHERE {where_predicates} + """, + { + "where_predicates": self._where_predicates, + }, + ) + else: + return None + + @cached_property + def property_operand(self): + return PropertyOperatorType.AND if self._query.operand == "AND" else PropertyOperatorType.OR + + @cached_property + def person_properties(self) -> PropertyGroupFilterValue | None: + person_property_groups = [g for g in (self._query.properties or []) if is_person_property(g)] + return ( + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND_ if self.property_operand == "AND" else FilterLogicalOperator.OR_, + values=person_property_groups, + ) + if person_property_groups + else None + ) + + @cached_property + def _where_predicates(self) -> ast.Expr: + return ( + property_to_expr(self.person_properties, team=self._team) + if self.person_properties + else ast.Constant(value=True) + ) + + +class CohortPropertyGroupsSubQuery: + _team: Team + _query: RecordingsQuery + _ttl_days: int + + raw_cohort_to_distinct_id = """ + SELECT + distinct_id +FROM raw_person_distinct_ids +WHERE distinct_id in (SELECT distinct_id FROM raw_person_distinct_ids WHERE 1=1 AND {cohort_predicate}) +GROUP BY distinct_id +HAVING argMax(is_deleted, version) = 0 AND {cohort_predicate} + """ + + def __init__(self, team: Team, query: RecordingsQuery, ttl_days: int): + self._team = team + self._query = query + self._ttl_days = ttl_days + + def get_query(self) -> ast.SelectQuery | ast.SelectSetQuery | None: + if self.cohort_properties: + return parse_select( + self.raw_cohort_to_distinct_id, + {"cohort_predicate": property_to_expr(self.cohort_properties, team=self._team, scope="replay")}, + ) + + return None + + @cached_property + def property_operand(self): + return PropertyOperatorType.AND if self._query.operand == "AND" else PropertyOperatorType.OR + + @cached_property + def cohort_properties(self) -> PropertyGroupFilterValue | None: + cohort_property_groups = [g for g in (self._query.properties or []) if is_cohort_property(g)] + return ( + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND_ if self.property_operand == "AND" else FilterLogicalOperator.OR_, + values=cohort_property_groups, + ) + if cohort_property_groups + else None + ) + + +class PersonsIdCompareOperation: + _team: Team + _query: RecordingsQuery + _ttl_days: int + + def __init__(self, team: Team, query: RecordingsQuery, ttl_days: int): + self._team = team + self._query = query + self._ttl_days = ttl_days + + def get_operation(self) -> CompareOperation | None: + q = self.get_query() + if not q: + return None + + if 
poe_is_active(self._team): + return ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["session_id"]), + right=q, + ) + else: + return ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["distinct_id"]), + right=q, + ) + + @cached_property + def query_date_range(self): + return QueryDateRange( + date_range=DateRange(date_from=self._query.date_from, date_to=self._query.date_to, explicitDate=True), + team=self._team, + interval=None, + now=datetime.now(), + ) + + def get_query(self) -> ast.SelectQuery | ast.SelectSetQuery | None: + if not self._query.person_uuid: + return None + + # anchor to python now so that tests can freeze time + now = datetime.utcnow() + + if poe_is_active(self._team): + return parse_select( + """ + select + distinct `$session_id` + from + events + where + person_id = {person_id} + and timestamp <= {now} + and timestamp >= {ttl_date} + and timestamp >= {date_from} + and timestamp <= {date_to} + and notEmpty(`$session_id`) + """, + { + "person_id": ast.Constant(value=self._query.person_uuid), + "ttl_days": ast.Constant(value=self._ttl_days), + "date_from": ast.Constant(value=self.query_date_range.date_from()), + "date_to": ast.Constant(value=self.query_date_range.date_to()), + "now": ast.Constant(value=now), + "ttl_date": ast.Constant(value=now - timedelta(days=self._ttl_days)), + }, + ) + else: + return parse_select( + """ + SELECT distinct_id + FROM person_distinct_ids + WHERE person_id = {person_id} + """, + { + "person_id": ast.Constant(value=self._query.person_uuid), + }, + ) + + +def _entity_to_expr(entity: EventsNode | ActionsNode) -> ast.Expr: + # KLUDGE: we should be able to use NodeKind.ActionsNode here but mypy :shrug: + if entity.kind == "ActionsNode": + action = Action.objects.get(pk=entity.id) + return action_to_expr(action) + else: + if entity.event is None: + return ast.Constant(value=True) + + return ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=["events", "event"]), + right=ast.Constant(value=entity.name), + ) + + +class ReplayFiltersEventsSubQuery: + _team: Team + _query: RecordingsQuery + + @cached_property + def query_date_range(self): + return QueryDateRange( + date_range=DateRange(date_from=self._query.date_from, date_to=self._query.date_to, explicitDate=True), + team=self._team, + interval=None, + now=datetime.now(), + ) + + @property + def ttl_days(self): + return ttl_days(self._team) + + def __init__( + self, + team: Team, + query: RecordingsQuery, + hogql_query_modifiers: Optional[HogQLQueryModifiers] = None, + ): + self._team = team + self._query = query + self._hogql_query_modifiers = hogql_query_modifiers + + @cached_property + def _event_predicates(self): + event_exprs: list[ast.Expr] = [] + event_names: set[int | str] = set() + + for entity in self.entities: + if entity.kind == NodeKind.ACTIONS_NODE: + action = Action.objects.get(pk=int(entity.id), team__project_id=self._team.project_id) + event_names.update([ae for ae in action.get_step_events() if ae and ae not in event_names]) + else: + if entity.event and entity.event not in event_names: + event_names.add(entity.event) + + entity_exprs = [_entity_to_expr(entity=entity)] + + if entity.properties: + entity_exprs.append(property_to_expr(entity.properties, team=self._team, scope="replay_entity")) + + event_exprs.append(ast.And(exprs=entity_exprs)) + + return event_exprs, list(event_names) + + def _select_from_events(self, select_expr: ast.Expr) -> ast.SelectQuery: + return ast.SelectQuery( + 
select=[select_expr], + select_from=ast.JoinExpr( + table=ast.Field(chain=["events"]), + ), + where=self._where_predicates(), + having=self._having_predicates(), + group_by=[ast.Field(chain=["$session_id"])], + ) + + def get_query_for_session_id_matching(self) -> ast.SelectQuery | ast.SelectSetQuery | None: + use_poe = poe_is_active(self._team) and self.person_properties + + if self.entities or self.event_properties or self.group_properties or use_poe: + return self._select_from_events(ast.Alias(alias="session_id", expr=ast.Field(chain=["$session_id"]))) + else: + return None + + def get_query_for_event_id_matching(self) -> ast.SelectQuery | ast.SelectSetQuery: + return self._select_from_events(ast.Call(name="groupUniqArray", args=[ast.Field(chain=["uuid"])])) + + def get_event_ids_for_session(self) -> SessionRecordingQueryResult: + query = self.get_query_for_event_id_matching() + + hogql_query_response = execute_hogql_query( + query=query, + team=self._team, + query_type="SessionRecordingMatchingEventsForSessionQuery", + modifiers=self._hogql_query_modifiers, + ) + + flattened_results = [str(uuid) for row in hogql_query_response.results for uuid in row[0]] + + return SessionRecordingQueryResult( + results=flattened_results, + has_more_recording=False, + timings=hogql_query_response.timings, + ) + + def _where_predicates(self) -> ast.Expr: + exprs: list[ast.Expr] = [ + ast.Call( + name="notEmpty", + args=[ast.Field(chain=["$session_id"])], + ), + # regardless of any other filters limit between TTL and current time + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["timestamp"]), + right=ast.Constant(value=datetime.now() - timedelta(days=self.ttl_days)), + ), + ast.CompareOperation( + op=ast.CompareOperationOp.LtEq, + left=ast.Field(chain=["timestamp"]), + right=ast.Call(name="now", args=[]), + ), + ] + + # TRICKY: we're adding a buffer to the date range to ensure we get all the events + # you can start sending us events before the session starts + if self._query.date_from: + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.GtEq, + left=ast.Field(chain=["timestamp"]), + right=ast.Constant(value=self.query_date_range.date_from() - timedelta(minutes=2)), + ) + ) + + # but we don't want to include events after date_to if provided + if self._query.date_to: + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.LtEq, + left=ast.Field(chain=["timestamp"]), + right=ast.Constant(value=self.query_date_range.date_to()), + ) + ) + + (event_where_exprs, _) = self._event_predicates + if event_where_exprs: + # we OR all events in the where and use hasAll / hasAny in the HAVING clause + exprs.append(ast.Or(exprs=event_where_exprs)) + + if self.event_properties: + exprs.append(property_to_expr(self.event_properties, team=self._team, scope="replay")) + + if self.group_properties: + exprs.append(property_to_expr(self.group_properties, team=self._team)) + + if self._team.person_on_events_mode and self.person_properties: + exprs.append(property_to_expr(self.person_properties, team=self._team, scope="event")) + + if self._query.session_ids: + exprs.append( + ast.CompareOperation( + op=ast.CompareOperationOp.In, + left=ast.Field(chain=["$session_id"]), + right=ast.Constant(value=self._query.session_ids), + ) + ) + + return ast.And(exprs=exprs) + + def _having_predicates(self) -> ast.Expr: + (_, event_names) = self._event_predicates + + if event_names: + return ast.Call( + name="hasAll" if self.property_operand == PropertyOperatorType.AND else "hasAny", 
+ args=[ + ast.Call(name="groupUniqArray", args=[ast.Field(chain=["event"])]), + # KLUDGE: sorting only so that snapshot tests are consistent + ast.Constant(value=sorted(event_names)), + ], + ) + + return ast.Constant(value=True) + + @cached_property + def action_entities(self): + # TODO what do we send to the API instead to avoid needing to do this + return [legacy_entity_to_node(Entity(e), True, MathAvailability.Unavailable) for e in self._query.actions or []] + + @cached_property + def event_entities(self): + # TODO what do we send to the API instead to avoid needing to do this + # TODO is this overkill since it feels like we only need a few things off the entity + return [legacy_entity_to_node(Entity(e), True, MathAvailability.Unavailable) for e in self._query.events or []] + + @cached_property + def entities(self): + return self.action_entities + self.event_entities + + @cached_property + def event_properties(self): + return [g for g in (self._query.properties or []) if is_event_property(g)] + + @cached_property + def group_properties(self): + return [g for g in (self._query.properties or []) if is_group_property(g)] + + @cached_property + def property_operand(self): + return PropertyOperatorType.AND if self._query.operand == "AND" else PropertyOperatorType.OR + + @cached_property + def person_properties(self) -> PropertyGroupFilterValue | None: + person_property_groups = [g for g in (self._query.properties or []) if is_person_property(g)] + return ( + PropertyGroupFilterValue( + type=FilterLogicalOperator.AND_ if self.property_operand == "AND" else FilterLogicalOperator.OR_, + values=person_property_groups, + ) + if person_property_groups + else None + ) diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr index 2eae8db4446f2..82e70f3063a2b 100644 --- a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr @@ -2683,7 +2683,7 @@ allow_experimental_analyzer=0 ''' # --- -# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text +# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text_0__key_level_value_warn_error_operator_exact_type_log_entry_key_message_value_message_4_operator_icontains_type_log_entry_ ''' SELECT s.session_id AS session_id, any(s.team_id), @@ -2706,10 +2706,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE 
and(or(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(equals(console_logs_log_entries.message, 'message 4'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE or(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 4%'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2724,7 +2724,7 @@ allow_experimental_analyzer=0 ''' # --- -# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text.1 +# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text_1__key_level_value_warn_error_operator_exact_type_log_entry_key_message_value_message_5_operator_icontains_type_log_entry_ ''' SELECT s.session_id AS session_id, any(s.team_id), @@ -2747,10 +2747,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2765,7 +2765,7 @@ allow_experimental_analyzer=0 ''' # --- -# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text.2 +# name: TestSessionRecordingsListFromFilters.test_filter_for_recordings_by_console_text_2__key_level_value_info_operator_exact_type_log_entry_key_message_value_message_5_operator_icontains_type_log_entry_ ''' SELECT s.session_id AS session_id, any(s.team_id), @@ -2788,10 +2788,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, 
log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2899,10 +2899,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'error'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'error'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2940,10 +2940,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -2981,10 +2981,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS 
log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3022,10 +3022,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3063,10 +3063,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3104,10 +3104,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS 
console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3145,10 +3145,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3186,10 +3186,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 20:00:00.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -3793,6 +3793,41 @@ allow_experimental_analyzer=0 ''' # --- +# name: TestSessionRecordingsListFromFilters.test_listing_ignores_future_replays + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-08-30 11:55:01.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-09 12:00:01.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-23 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-30 12:00:01.000000', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- # name: TestSessionRecordingsListFromFilters.test_multiple_event_filters ''' SELECT s.session_id AS session_id, @@ -4113,7 +4148,130 @@ allow_experimental_analyzer=0 ''' # --- -# name: TestSessionRecordingsListFromFilters.test_operand_or_filters +# name: TestSessionRecordingsListFromFilters.test_operand_or_filters_0__key_level_value_warn_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + 
FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_operand_or_filters_1__key_level_value_info_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_operand_or_filters_2__key_level_value_warn_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS 
start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromFilters.test_operand_or_filters_3__key_message_value_random_operator_exact_type_log_entry_ ''' SELECT s.session_id AS session_id, any(s.team_id), @@ -4136,10 +4294,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE 
ifNull(equals(console_logs_log_entries.message, 'random'), 0)))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC @@ -4154,7 +4312,7 @@ allow_experimental_analyzer=0 ''' # --- -# name: TestSessionRecordingsListFromFilters.test_operand_or_filters.1 +# name: TestSessionRecordingsListFromFilters.test_operand_or_filters_4__key_level_value_warn_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ ''' SELECT s.session_id AS session_id, any(s.team_id), @@ -4177,10 +4335,10 @@ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT console_logs_log_entries.log_source_id AS log_source_id FROM - (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message, log_entries.log_source AS log_source + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message FROM log_entries WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries - WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0)), ifNull(equals(console_logs_log_entries.log_source, 'session_replay'), 0))))) + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) GROUP BY s.session_id HAVING 1 ORDER BY start_time DESC diff --git a/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr new file mode 100644 index 0000000000000..34f0b42e60970 --- /dev/null +++ b/posthog/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_query.ambr @@ -0,0 +1,5853 @@ +# serializer version: 1 +# name: TestSessionRecordingsListFromQuery.test_action_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE 
and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0), ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_action_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_action_filter.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + 
HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_action_filter.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-01-03 23:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-01-04 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-14 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2022-12-31 23:58:00.000000', 6, 'UTC')), and(and(equals(events.event, 'custom-event'), and(ifNull(equals(nullIf(nullIf(events.`$session_id`, ''), 'null'), 'test_action_filter-session-one'), 0), ifNull(equals(nullIf(nullIf(events.`$window_id`, ''), 'null'), 'test_action_filter-window-id'), 0))), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_all_filters_at_once + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + 
min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-22 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-04 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-21 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-04 00:00:00.000000', 6, 'UTC')), or(equals(events.event, 'custom-event'), equals(events.event, '$pageview'))) + GROUP BY events.`$session_id` + HAVING 
hasAll(groupUniqArray(events.event), ['$pageview', 'custom-event'])))) + GROUP BY s.session_id + HAVING ifNull(greater(duration, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + 
divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), 1) + GROUP BY events.`$session_id` + 
HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_any_event_filter_with_properties_materialized.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS 
mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(1, ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_active_sessions + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(greater(duration, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_active_sessions.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(greater(active_seconds, '60'), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_active_sessions.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(greater(inactive_seconds, '60'), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_ordering + ''' + SELECT s.session_id AS session_id, + 
any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY active_seconds DESC + LIMIT 4 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_ordering.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY console_error_count DESC + LIMIT 4 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_ordering.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 4 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_paging + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 2 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_paging.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 2 + OFFSET 1 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_basic_query_with_paging.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + 
sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 2 + OFFSET 2 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_from_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + 
format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_from_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-30 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_from_filter_cannot_search_before_ttl + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 12:41:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM 
session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 12:46:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-12 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_from_filter_cannot_search_before_ttl.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 12:41:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 12:46:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_from_filter_cannot_search_before_ttl.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS 
console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 12:41:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 12:46:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-10 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_to_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-28 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_date_to_filter.1 + ''' + SELECT 
s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_duration_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 
'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(greater(duration, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_duration_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(less(duration, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), 
notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$autocapture'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_has_ttl_applied_too + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_has_ttl_applied_too.1 + ''' + SELECT 
s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_active_sessions + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 
00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING ifNull(greater(duration, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_active_sessions.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING ifNull(greater(active_seconds, 60.0), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + 
max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_group_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 99999), equals(index, 1)) + GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__group_1.properties___name, 'org one'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_group_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + 
any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 99999), equals(index, 1)) + GROUP BY groups.group_type_index, groups.group_key) AS events__group_1 ON equals(events.`$group_1`, events__group_1.key) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_1.properties___name, 'org one'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_group_filter.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS 
inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT JOIN + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(groups.group_properties, 'name'), ''), 'null'), '^"|"$', ''), toTimeZone(groups._timestamp, 'UTC')) AS properties___name, groups.group_type_index AS index, groups.group_key AS key + FROM groups + WHERE and(equals(groups.team_id, 99999), equals(index, 2)) + GROUP BY groups.group_type_index, groups.group_key) AS events__group_2 ON equals(events.`$group_2`, events__group_2.key) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__group_2.properties___name, 'org one'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), 
sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, 
person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + 
sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_event_properties_test_accounts_excluded_materialized.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), 
plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_person_properties + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS 
inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'bla'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_person_properties.1 + ''' + SELECT 
s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(events__person.properties___email, 'something else'), 0))) + GROUP BY 
events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_properties + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_properties.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) 
AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_properties_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), 
plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_hogql_properties_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_matching_on_session_id + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: 
TestSessionRecordingsListFromQuery.test_event_filter_with_matching_on_session_id.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$autocapture')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$autocapture'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) 
AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), 
['a_different_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Safari'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) 
AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), 
sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, '$pageview'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Firefox'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties_materialized.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Chrome'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_properties_materialized.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(equals(events.event, 'a_different_event'), ifNull(equals(nullIf(nullIf(events.`mat_$browser`, ''), 'null'), 'Safari'), 0))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['a_different_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: 
TestSessionRecordingsListFromQuery.test_event_filter_with_test_accounts_excluded + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), 
and(ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_test_accounts_excluded.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: 
TestSessionRecordingsListFromQuery.test_event_filter_with_test_accounts_excluded_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), 
and(ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$browser'), ''), 'null'), '^"|"$', ''), 'Chrome'), 0)), ifNull(notILike(events__person.properties___email, '%@posthog.com%'), 1)) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_test_accounts_excluded_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_event_filter_with_two_events_and_multiple_teams + ''' + SELECT s.session_id AS session_id, + 
any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, '$pageleave'))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageleave', '$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_console_text_0__key_level_value_warn_error_operator_exact_type_log_entry_key_message_value_message_4_operator_icontains_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), 
toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE or(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 4%'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_console_text_1__key_level_value_warn_error_operator_exact_type_log_entry_key_message_value_message_4_operator_icontains_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), 
ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 4%'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_console_text_2__key_level_value_warn_error_operator_exact_type_log_entry_key_message_value_message_5_operator_icontains_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0)), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + 
max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_console_text_3__key_level_value_info_operator_exact_type_log_entry_key_message_value_message_5_operator_icontains_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(ilike(console_logs_log_entries.message, '%message 5%'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_snapshot_source + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, 
active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(equals(argMinMerge(s.snapshot_source), 'web'), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_by_snapshot_source.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING ifNull(equals(argMinMerge(s.snapshot_source), 'mobile'), 0) + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_errors + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'error'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_errors.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_logs + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) 
AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_logs.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_warns + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS 
inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_console_warns.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 
23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_mixed_console_counts + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.level, 'error'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_for_recordings_with_mixed_console_counts.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS 
start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'info'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_on_session_ids + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + 
WHERE and(equals(s.team_id, 99999), + ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), + in(s.session_id, + ['00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000001' /* ... */], + ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), + ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_on_session_ids.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), + ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), + in(s.session_id, + ['00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000001' /* ... 
*/], + ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), + ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_cohort_properties + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_cohort_properties.1 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_cohort_properties.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id, + 
(SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_events_and_cohorts + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_events_and_cohorts.1 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_events_and_cohorts.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), and(in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview']))), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT person_distinct_id2.distinct_id AS 
distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_events_and_cohorts.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), and(in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-08-17 23:58:00.000000', 6, 'UTC')), equals(events.event, 'custom_event')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['custom_event']))), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT 
person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0))))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_person_properties_exact + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, 
replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla@gmail.com'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_person_properties_not_contains + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS 
distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), ifNull(notILike(events__person.properties___email, '%gmail.com%'), 1)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties + ''' + + SELECT count(DISTINCT person_id) + FROM person_static_cohort + WHERE team_id = 99999 + AND cohort_id = 99999 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.1 + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.2 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.3 + ''' + + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = NULL + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.4 + ''' + /* cohort_calculation: */ + SELECT count(DISTINCT person_id) + FROM cohortpeople + WHERE team_id = 99999 + AND cohort_id = 99999 + AND version = 0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.5 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 
'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 99999), equals(person_static_cohort.cohort_id, 99999)))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 99999), equals(person_static_cohort.cohort_id, 99999)))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.6 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_filter_with_static_and_dynamic_cohort_properties.7 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-08-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s 
+ WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-07-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-08-21 23:59:59.999999', 6, 'UTC')), 0), in(s.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), in(person_distinct_id2.distinct_id, + (SELECT person_distinct_id2.distinct_id AS distinct_id + FROM person_distinct_id2 + WHERE and(equals(person_distinct_id2.team_id, 99999), 1, and(in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))), in(person_distinct_id2.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 99999), equals(person_static_cohort.cohort_id, 99999))))))))) + GROUP BY person_distinct_id2.distinct_id + HAVING and(ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0), and(in(person_distinct_id2.person_id, + (SELECT cohortpeople.person_id AS person_id + FROM cohortpeople + WHERE and(equals(cohortpeople.team_id, 99999), equals(cohortpeople.cohort_id, 99999), equals(cohortpeople.version, 0)))), in(person_distinct_id2.person_id, + (SELECT person_static_cohort.person_id AS person_id + FROM person_static_cohort + WHERE and(equals(person_static_cohort.team_id, 99999), equals(person_static_cohort.cohort_id, 99999))))))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_listing_ignores_future_replays + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2023-08-30 11:55:01.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-09 12:00:01.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-27 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2023-08-30 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event'))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + 
any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2'))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event2'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'new-event2'))) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event2'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), 
notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'bar'), ''), 'null'), '^"|"$', ''), 'foo'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters.4 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 
'null'), '^"|"$', ''), 'bar'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'new-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_multiple_event_filters.5 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(and(equals(events.event, '$pageview'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)), and(equals(events.event, 'new-event'), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'foo'), ''), 'null'), '^"|"$', ''), 'bar'), 0)))) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'new-event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_event_filters + ''' + SELECT s.session_id AS 
session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event'))) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview', 'custom_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_event_filters.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(equals(events.event, '$pageview'), equals(events.event, 'custom_event'))) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview', 'custom_event'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_filters_0__key_level_value_warn_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_filters_1__key_level_value_info_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE and(ifNull(equals(console_logs_log_entries.level, 'info'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- 
+# name: TestSessionRecordingsListFromQuery.test_operand_or_filters_2__key_level_value_warn_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.level, 'warn'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_filters_3__key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE ifNull(equals(console_logs_log_entries.message, 'random'), 0)))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_filters_4__key_level_value_warn_operator_exact_type_log_entry_key_message_value_random_operator_exact_type_log_entry_ + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT console_logs_log_entries.log_source_id AS log_source_id + FROM + (SELECT log_entries.log_source_id AS log_source_id, log_entries.level AS level, log_entries.message AS message + FROM log_entries + WHERE and(equals(log_entries.team_id, 99999), 
equals(log_entries.log_source, 'session_replay'))) AS console_logs_log_entries + WHERE or(ifNull(equals(console_logs_log_entries.level, 'warn'), 0), ifNull(equals(console_logs_log_entries.message, 'random'), 0))))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_mandatory_filters + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_mandatory_filters.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_mandatory_filters.2 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_one']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_one'])) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_mandatory_filters.3 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, ['session_id_two']), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview'), in(events.`$session_id`, ['session_id_two'])) + GROUP BY events.`$session_id` + HAVING hasAny(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_person_filters + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + 
max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), and(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + 
OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_operand_or_person_filters.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), 
toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-28 23:58:00.000000', 6, 'UTC')), or(ifNull(equals(events__person.properties___email, 'test@posthog.com'), 0), ifNull(equals(events__person.properties___email, 'david@posthog.com'), 0))) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_ordering + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_ordering.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + 
sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY mouse_activity_count DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_person_id_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + (SELECT DISTINCT events.`$session_id` AS `$session_id` + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + WHERE and(equals(events.team_id, 99999), 
ifNull(equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), '00000000-0000-0000-0000-000000000000'), 0), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), notEmpty(events.`$session_id`)))), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_sessions_with_current_data + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-29 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 23:59:59.999999', 6, 'UTC')), 0)) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_host_property_test_account_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + 
max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_host_property_test_account_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), 
plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, '$host'), ''), 'null'), '^"|"$', '')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_host_property_test_account_filter_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), 
greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_host_property_test_account_filter_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(not(match(toString(nullIf(nullIf(events.`mat_$host`, ''), 'null')), '^(localhost|127\\.0\\.0\\.1)($|:)')), 1)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: 
TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 
6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_allowing_denormalized_props + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 
'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_allowing_denormalized_props.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + 
max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_allowing_denormalized_props_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_allowing_denormalized_props_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, 
+ sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 
00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_event_property_test_account_filter_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'false'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, 
+ format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_event_property_test_account_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_event_property_test_account_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + 
sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, 'is_internal_user'), ''), 'null'), '^"|"$', ''), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_event_property_test_account_filter_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_event_property_test_account_filter_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(events.mat_is_internal_user, ''), 'null'), 'true'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 
51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_person_property_test_account_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_person_property_test_account_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + 
minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_person_property_test_account_filter_materialized + ''' + SELECT s.session_id AS session_id, + 
any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_hogql_person_property_test_account_filter_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_person_property_test_account_filter + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS 
inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_person_property_test_account_filter.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, 'email'), ''), 'null'), '^"|"$', '') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_person_property_test_account_filter_materialized + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), 
sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), equals(events.event, '$pageview')) + GROUP BY events.`$session_id` + HAVING hasAll(groupUniqArray(events.event), ['$pageview'])))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- +# name: TestSessionRecordingsListFromQuery.test_top_level_person_property_test_account_filter_materialized.1 + ''' + SELECT s.session_id AS session_id, + any(s.team_id), + any(s.distinct_id), + min(toTimeZone(s.min_first_timestamp, 'UTC')) AS start_time, + max(toTimeZone(s.max_last_timestamp, 'UTC')) AS end_time, + dateDiff('SECOND', start_time, end_time) AS duration, + argMinMerge(s.first_url) AS first_url, + sum(s.click_count) AS click_count, + sum(s.keypress_count) AS keypress_count, + sum(s.mouse_activity_count) AS mouse_activity_count, + divide(sum(s.active_milliseconds), 1000) AS active_seconds, + minus(duration, active_seconds) AS inactive_seconds, + sum(s.console_log_count) AS console_log_count, + sum(s.console_warn_count) AS console_warn_count, + sum(s.console_error_count) AS console_error_count, + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-21 19:55:00.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score + FROM session_replay_events AS s + WHERE and(equals(s.team_id, 99999), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-18 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-21 23:59:59.999999', 6, 'UTC')), 0), in(s.session_id, + (SELECT events.`$session_id` AS session_id + FROM events + LEFT OUTER JOIN + (SELECT argMax(person_distinct_id_overrides.person_id, 
person_distinct_id_overrides.version) AS person_id, person_distinct_id_overrides.distinct_id AS distinct_id + FROM person_distinct_id_overrides + WHERE equals(person_distinct_id_overrides.team_id, 99999) + GROUP BY person_distinct_id_overrides.distinct_id + HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) + LEFT JOIN + (SELECT person.id AS id, nullIf(nullIf(person.pmat_email, ''), 'null') AS properties___email + FROM person + WHERE and(equals(person.team_id, 99999), ifNull(in(tuple(person.id, person.version), + (SELECT person.id AS id, max(person.version) AS version + FROM person + WHERE equals(person.team_id, 99999) + GROUP BY person.id + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 99999), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2020-12-31 20:00:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, 'UTC'), now64(6, 'UTC')), greaterOrEquals(toTimeZone(events.timestamp, 'UTC'), toDateTime64('2021-01-17 23:58:00.000000', 6, 'UTC')), ifNull(equals(events__person.properties___email, 'bla'), 0)) + GROUP BY events.`$session_id` + HAVING 1))) + GROUP BY s.session_id + HAVING 1 + ORDER BY start_time DESC + LIMIT 51 + OFFSET 0 SETTINGS readonly=2, + max_execution_time=60, + allow_experimental_object_type=1, + format_csv_allow_double_quotes=0, + max_ast_elements=4000000, + max_expanded_ast_elements=4000000, + max_bytes_before_external_group_by=0, + allow_experimental_analyzer=0 + ''' +# --- diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py index 3633092a40e25..f8ce2daccf3d5 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_filters.py @@ -1,10 +1,13 @@ from datetime import datetime +from typing import Literal from unittest.mock import ANY from uuid import uuid4 from dateutil.relativedelta import relativedelta from django.utils.timezone import now from freezegun import freeze_time +from parameterized import parameterized + from posthog.clickhouse.client import sync_execute from posthog.clickhouse.log_entries import TRUNCATE_LOG_ENTRIES_TABLE_SQL from posthog.constants import AvailableFeature @@ -78,8 +81,8 @@ def create_event( properties=properties, ) - def _filter_recordings_by(self, recordings_filter: dict) -> SessionRecordingQueryResult: - the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter) + def _filter_recordings_by(self, recordings_filter: dict | None = None) -> SessionRecordingQueryResult: + the_filter = SessionRecordingsFilter(team=self.team, data=recordings_filter or {}) session_recording_list_instance = SessionRecordingListFromFilters( filter=the_filter, team=self.team, hogql_query_modifiers=None ) @@ -781,6 +784,26 @@ def test_ttl_days(self): with freeze_time("2023-09-05T12:00:01Z"): assert 
ttl_days(self.team) == 35 + @snapshot_clickhouse_queries + def test_listing_ignores_future_replays(self): + with freeze_time("2023-08-29T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="29th Aug") + + with freeze_time("2023-09-01T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="1st-sep") + + with freeze_time("2023-09-02T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="2nd-sep") + + with freeze_time("2023-09-03T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="3rd-sep") + + with freeze_time("2023-08-30T12:00:01Z"): + recordings = self._filter_recordings_by() + + # recordings in the future don't show + assert [s["session_id"] for s in recordings.results] == ["29th Aug"] + @snapshot_clickhouse_queries def test_filter_on_session_ids(self): user = "test_session_ids-user" @@ -1613,8 +1636,53 @@ def test_operand_or_event_filters(self): assert len(session_recordings) == 2 assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) + @parameterized.expand( + [ + # Case 1: Neither has WARN and message "random" + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 0, + [], + ), + # Case 2: AND only matches one recording + ( + '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["both_log_filters"], + ), + # Case 3: Only one is WARN level + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["one_log_filter"], + ), + # Case 4: Only one has message "random" + ( + '[{"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["both_log_filters"], + ), + # Case 5: OR matches both + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "OR", + 2, + ["both_log_filters", "one_log_filter"], + ), + ] + ) @snapshot_clickhouse_queries - def test_operand_or_filters(self): + def test_operand_or_filters( + self, + console_log_filters: str, + operand: Literal["AND", "OR"], + expected_count: int, + expected_session_ids: list[str], + ) -> None: user = "test_operand_or_filter-user" Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) @@ -1624,13 +1692,10 @@ def test_operand_or_filters(self): session_id=session_with_both_log_filters, first_timestamp=self.an_hour_ago, team_id=self.team.id, - console_warn_count=1, - log_messages={ - "warn": [ - "random", - ], - }, + console_log_count=1, + log_messages={"info": ["random"]}, ) + session_with_one_log_filter = "one_log_filter" produce_replay_summary( distinct_id="user", @@ -1638,29 +1703,15 @@ def test_operand_or_filters(self): first_timestamp=self.an_hour_ago, team_id=self.team.id, console_warn_count=1, - log_messages={ - "warn": [ - "warn", - ], - }, + log_messages={"warn": ["warn"]}, ) - (session_recordings, _, _) = self._filter_recordings_by( - { - "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', - "operand": "AND", - } + session_recordings, _, _ = self._filter_recordings_by( + {"console_log_filters": 
console_log_filters, "operand": operand} ) - assert len(session_recordings) == 1 - assert session_recordings[0]["session_id"] == session_with_both_log_filters - (session_recordings, _, _) = self._filter_recordings_by( - { - "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', - "operand": "OR", - } - ) - assert len(session_recordings) == 2 + assert len(session_recordings) == expected_count + assert sorted([rec["session_id"] for rec in session_recordings]) == sorted(expected_session_ids) @snapshot_clickhouse_queries def test_operand_or_mandatory_filters(self): @@ -3028,19 +3079,42 @@ def test_filter_for_recordings_with_mixed_console_counts(self): ] ) + @parameterized.expand( + [ + # Case 1: OR operand, message 4 matches in warn and error + ( + '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 4", "operator": "icontains", "type": "log_entry"}]', + "OR", + ["with-errors-session", "with-two-session", "with-warns-session", "with-logs-session"], + ), + # Case 2: AND operand, message 5 matches only in warn + ( + '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "AND", + ["with-warns-session"], + ), + # Case 3: AND operand, message 5 does not match log level "info" + ( + '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "AND", + [], + ), + ] + ) @snapshot_clickhouse_queries @freeze_time("2021-01-21T20:00:00.000Z") - def test_filter_for_recordings_by_console_text(self): + def test_filter_for_recordings_by_console_text( + self, + console_log_filters: str, + operand: Literal["AND", "OR"], + expected_session_ids: list[str], + ) -> None: Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - with_logs_session_id = "with-logs-session" - with_warns_session_id = "with-warns-session" - with_errors_session_id = "with-errors-session" - with_two_session_id = "with-two-session" - + # Create sessions produce_replay_summary( distinct_id="user", - session_id=with_logs_session_id, + session_id="with-logs-session", first_timestamp=self.an_hour_ago, team_id=self.team.id, console_log_count=4, @@ -3055,7 +3129,7 @@ def test_filter_for_recordings_by_console_text(self): ) produce_replay_summary( distinct_id="user", - session_id=with_warns_session_id, + session_id="with-warns-session", first_timestamp=self.an_hour_ago, team_id=self.team.id, console_warn_count=5, @@ -3071,7 +3145,7 @@ def test_filter_for_recordings_by_console_text(self): ) produce_replay_summary( distinct_id="user", - session_id=with_errors_session_id, + session_id="with-errors-session", first_timestamp=self.an_hour_ago, team_id=self.team.id, console_error_count=4, @@ -3086,7 +3160,7 @@ def test_filter_for_recordings_by_console_text(self): ) produce_replay_summary( distinct_id="user", - session_id=with_two_session_id, + session_id="with-two-session", first_timestamp=self.an_hour_ago, team_id=self.team.id, console_error_count=4, @@ -3101,46 +3175,24 @@ def test_filter_for_recordings_by_console_text(self): "info": ["log message 1", "log message 2", "log message 3"], }, ) - - (session_recordings, _, _) = self._filter_recordings_by( - { - # there are 5 warn and 4 
error logs, message 4 matches in both - "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 4", "operator": "exact", "type": "log_entry"}]', - "operand": "OR", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_errors_session_id, - with_two_session_id, - with_warns_session_id, - ] - ) - - (session_recordings, _, _) = self._filter_recordings_by( - { - # there are 5 warn and 4 error logs, message 5 matches only matches in warn - "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', - "operand": "AND", - } - ) - - assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( - [ - with_warns_session_id, - ] + produce_replay_summary( + distinct_id="user", + session_id="with-no-matches-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + console_log_count=3, + log_messages={ + "info": ["log message 1", "log message 2", "log message 3"], + }, ) - (session_recordings, _, _) = self._filter_recordings_by( - { - # message 5 does not match log level "info" - "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', - "operand": "AND", - } + # Perform the filtering and validate results + session_recordings, _, _ = self._filter_recordings_by( + {"console_log_filters": console_log_filters, "operand": operand} ) - assert sorted([sr["session_id"] for sr in session_recordings]) == [] + assert sorted([sr["session_id"] for sr in session_recordings]) == sorted(expected_session_ids) @snapshot_clickhouse_queries def test_filter_for_recordings_by_snapshot_source(self): diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py new file mode 100644 index 0000000000000..abfd8ab98b956 --- /dev/null +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_query.py @@ -0,0 +1,4170 @@ +from datetime import datetime +from typing import Literal +from unittest.mock import ANY +from uuid import uuid4 + +from dateutil.relativedelta import relativedelta +from django.utils.timezone import now +from freezegun import freeze_time +from parameterized import parameterized + +from posthog.clickhouse.client import sync_execute +from posthog.clickhouse.log_entries import TRUNCATE_LOG_ENTRIES_TABLE_SQL +from posthog.constants import AvailableFeature +from posthog.models import Cohort, GroupTypeMapping, Person +from posthog.models.action import Action +from posthog.models.group.util import create_group +from posthog.models.team import Team +from posthog.schema import RecordingsQuery +from posthog.session_recordings.queries.session_recording_list_from_query import ( + SessionRecordingQueryResult, +) +from posthog.session_recordings.queries.session_recording_list_from_query import SessionRecordingListFromQuery +from posthog.session_recordings.queries.session_replay_events import ttl_days +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) +from posthog.session_recordings.session_recording_api import query_as_params_to_dict +from 
posthog.session_recordings.sql.session_replay_event_sql import ( + TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL, +) +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + also_test_with_materialized_columns, + flush_persons_and_events, + snapshot_clickhouse_queries, +) + + +@freeze_time("2021-01-01T13:46:23") +class TestSessionRecordingsListFromQuery(ClickhouseTestMixin, APIBaseTest): + def setUp(self): + super().setUp() + sync_execute(TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL()) + sync_execute(TRUNCATE_LOG_ENTRIES_TABLE_SQL) + + def create_action(self, name, team_id=None, properties=None): + if team_id is None: + team_id = self.team.pk + if properties is None: + properties = [] + action = Action.objects.create( + team_id=team_id, + name=name, + steps_json=[ + { + "event": name, + "properties": properties, + } + ], + ) + return action + + def create_event( + self, + distinct_id, + timestamp, + team=None, + event_name="$pageview", + properties=None, + ): + if team is None: + team = self.team + if properties is None: + properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} + return _create_event( + team=team, + event=event_name, + timestamp=timestamp, + distinct_id=distinct_id, + properties=properties, + ) + + def _filter_recordings_by(self, recordings_filter: dict | None = None) -> SessionRecordingQueryResult: + the_query = RecordingsQuery.model_validate(query_as_params_to_dict(recordings_filter or {})) + session_recording_list_instance = SessionRecordingListFromQuery( + query=the_query, team=self.team, hogql_query_modifiers=None + ) + return session_recording_list_instance.run() + + @property + def an_hour_ago(self): + return (now() - relativedelta(hours=1)).replace(microsecond=0, second=0) + + @snapshot_clickhouse_queries + def test_basic_query(self): + user = "test_basic_query-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = f"test_basic_query-{str(uuid4())}" + session_id_two = f"test_basic_query-{str(uuid4())}" + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=20)).isoformat().replace("T", " "), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=50 * 1000 * 0.5, # 50% of the total expected duration + ) + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=0, # 30% of the total expected duration + ) + + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=2000)), + distinct_id=user, + first_url="https://another-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=1980 * 1000 * 0.4, # 40% of the total expected duration + ) + + session_recordings, more_recordings_available, _ = self._filter_recordings_by() + + assert 
session_recordings == [ + { + "session_id": session_id_two, + "activity_score": 40.16, + "team_id": self.team.pk, + "distinct_id": user, + "click_count": 2, + "keypress_count": 2, + "mouse_activity_count": 2, + "duration": 1980, + "active_seconds": 792.0, + "inactive_seconds": 1188.0, + "start_time": self.an_hour_ago + relativedelta(seconds=20), + "end_time": self.an_hour_ago + relativedelta(seconds=2000), + "first_url": "https://another-url.com", + "console_log_count": 0, + "console_warn_count": 0, + "console_error_count": 0, + "ongoing": 1, + }, + { + "session_id": session_id_one, + "activity_score": 61.11, + "team_id": self.team.pk, + "distinct_id": user, + "click_count": 4, + "keypress_count": 4, + "mouse_activity_count": 4, + "duration": 50, + "active_seconds": 25.0, + "inactive_seconds": 25.0, + "start_time": self.an_hour_ago, + "end_time": self.an_hour_ago + relativedelta(seconds=50), + "first_url": "https://example.io/home", + "console_log_count": 0, + "console_warn_count": 0, + "console_error_count": 0, + "ongoing": 1, + }, + ] + + assert more_recordings_available is False + + @snapshot_clickhouse_queries + def test_basic_query_active_sessions( + self, + ): + user = "test_basic_query-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_total_is_61 = f"test_basic_query_active_sessions-total-{str(uuid4())}" + session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}" + session_id_inactive_is_61 = f"test_basic_query_active_sessions-inactive-{str(uuid4())}" + + produce_replay_summary( + session_id=session_id_total_is_61, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)).isoformat().replace("T", " "), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=59000, + ) + + produce_replay_summary( + session_id=session_id_active_is_61, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=59)), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=61000, + ) + + produce_replay_summary( + session_id=session_id_inactive_is_61, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=0, + keypress_count=0, + mouse_activity_count=0, + active_milliseconds=0, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]', + } + ) + + assert sorted( + [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings], + key=lambda x: x[0], + ) == [ + (session_id_inactive_is_61, 61, 0.0), + (session_id_total_is_61, 61, 59.0), + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "having_predicates": '[{"type":"recording","key":"active_seconds","value":"60","operator":"gt"}]', + } + ) + + assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ + (session_id_active_is_61, 59, 61.0) + ] + + (session_recordings, _, _) = self._filter_recordings_by( 
+ { + "having_predicates": '[{"type":"recording","key":"inactive_seconds","value":"60","operator":"gt"}]', + } + ) + + assert [(s["session_id"], s["duration"], s["inactive_seconds"]) for s in session_recordings] == [ + (session_id_inactive_is_61, 61, 61.0) + ] + + @snapshot_clickhouse_queries + def test_sessions_with_current_data( + self, + ): + user = "test_sessions_with_current_data-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_inactive = f"test_sessions_with_current_data-inactive-{str(uuid4())}" + session_id_active = f"test_sessions_with_current_data-active-{str(uuid4())}" + + produce_replay_summary( + session_id=session_id_inactive, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=60), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=59000, + kafka_timestamp=(datetime.utcnow() - relativedelta(minutes=6)), + ) + + produce_replay_summary( + session_id=session_id_active, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=60), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=61000, + kafka_timestamp=(datetime.utcnow() - relativedelta(minutes=3)), + ) + + (session_recordings, _, _) = self._filter_recordings_by({}) + assert sorted( + [(s["session_id"], s["ongoing"]) for s in session_recordings], + key=lambda x: x[0], + ) == [ + (session_id_active, 1), + (session_id_inactive, 0), + ] + + @snapshot_clickhouse_queries + def test_basic_query_with_paging(self): + user = "test_basic_query_with_paging-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = f"id_one_test_basic_query_with_paging-{str(uuid4())}" + session_id_two = f"id_two_test_basic_query_with_paging-{str(uuid4())}" + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=20)).isoformat().replace("T", " "), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=50 * 1000 * 0.5, # 50% of the total expected duration + ) + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=0, # 30% of the total expected duration + ) + + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=2000)), + distinct_id=user, + first_url="https://another-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=1980 * 1000 * 0.4, # 40% of the total expected duration + ) + + (session_recordings, more_recordings_available, _) = self._filter_recordings_by({"limit": 1, "offset": 0}) + 
+ assert session_recordings == [ + { + "activity_score": 40.16, + "session_id": session_id_two, + "team_id": self.team.pk, + "distinct_id": user, + "click_count": 2, + "keypress_count": 2, + "mouse_activity_count": 2, + "duration": 1980, + "active_seconds": 792.0, + "inactive_seconds": 1188.0, + "start_time": self.an_hour_ago + relativedelta(seconds=20), + "end_time": self.an_hour_ago + relativedelta(seconds=2000), + "first_url": "https://another-url.com", + "console_log_count": 0, + "console_warn_count": 0, + "console_error_count": 0, + "ongoing": 1, + } + ] + + assert more_recordings_available is True + + (session_recordings, more_recordings_available, _) = self._filter_recordings_by({"limit": 1, "offset": 1}) + + assert session_recordings == [ + { + "session_id": session_id_one, + "activity_score": 61.11, + "team_id": self.team.pk, + "distinct_id": user, + "click_count": 4, + "keypress_count": 4, + "mouse_activity_count": 4, + "duration": 50, + "active_seconds": 25.0, + "inactive_seconds": 25.0, + "start_time": self.an_hour_ago, + "end_time": self.an_hour_ago + relativedelta(seconds=50), + "first_url": "https://example.io/home", + "console_log_count": 0, + "console_warn_count": 0, + "console_error_count": 0, + "ongoing": 1, + }, + ] + + assert more_recordings_available is False + + (session_recordings, more_recordings_available, _) = self._filter_recordings_by({"limit": 1, "offset": 2}) + + assert session_recordings == [] + + assert more_recordings_available is False + + @snapshot_clickhouse_queries + def test_basic_query_with_ordering(self): + user = "test_basic_query_with_ordering-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = f"test_basic_query_with_ordering-session-1-{str(uuid4())}" + session_id_two = f"test_basic_query_with_ordering-session-2-{str(uuid4())}" + + session_one_start = self.an_hour_ago + relativedelta(seconds=10) + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=session_one_start, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + distinct_id=user, + console_error_count=1000, + active_milliseconds=1, # most errors, but the least activity + ) + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=session_one_start, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + distinct_id=user, + console_error_count=12, + active_milliseconds=1, # most errors, but the least activity + ) + + session_two_start = self.an_hour_ago + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + # starts before session one + first_timestamp=session_two_start, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + distinct_id=user, + console_error_count=430, + active_milliseconds=1000, # most activity, but the least errors + ) + + (session_recordings) = self._filter_recordings_by({"limit": 3, "offset": 0, "order": "active_seconds"}) + + ordered_by_activity = [(r["session_id"], r["active_seconds"]) for r in session_recordings.results] + assert ordered_by_activity == [(session_id_two, 1.0), (session_id_one, 0.002)] + + (session_recordings) = self._filter_recordings_by({"limit": 3, "offset": 0, "order": "console_error_count"}) + + ordered_by_errors = [(r["session_id"], r["console_error_count"]) for r in session_recordings.results] + assert ordered_by_errors == [(session_id_one, 1012), 
(session_id_two, 430)] + + (session_recordings) = self._filter_recordings_by({"limit": 3, "offset": 0, "order": "start_time"}) + + ordered_by_default = [(r["session_id"], r["start_time"]) for r in session_recordings.results] + assert ordered_by_default == [(session_id_one, session_one_start), (session_id_two, session_two_start)] + + def test_first_url_selection(self): + user = "test_first_url_selection-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = f"first-url-on-first-event-{str(uuid4())}" + session_id_two = f"first-url-not-on-first-event-{str(uuid4())}" + session_id_three = f"no-url-from-many-{str(uuid4())}" + session_id_four = f"events-inserted-out-of-order-{str(uuid4())}" + + # session one has the first url on the first event + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=20), + first_url="https://on-first-event.com", + ) + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago + relativedelta(seconds=10), + last_timestamp=self.an_hour_ago + relativedelta(seconds=20), + first_url="https://on-second-event.com", + ) + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago + relativedelta(seconds=20), + last_timestamp=self.an_hour_ago + relativedelta(seconds=40), + first_url="https://on-third-event.com", + ) + + # session two has no URL on the first event + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=50)), + first_url=None, + ) + + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + first_url="https://first-is-on-second-event.com", + ) + + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=25)), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + first_url="https://another-on-the-session.com", + ) + + # session three has no URLs + produce_replay_summary( + session_id=session_id_three, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=50), + distinct_id=user, + first_url=None, + ) + + produce_replay_summary( + session_id=session_id_three, + team_id=self.team.pk, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=10)), + last_timestamp=self.an_hour_ago + relativedelta(seconds=50), + distinct_id=user, + first_url=None, + ) + + produce_replay_summary( + session_id=session_id_three, + team_id=self.team.pk, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=20)), + last_timestamp=self.an_hour_ago + relativedelta(seconds=60), + distinct_id=user, + first_url=None, + ) + + # session four events are received out of order + produce_replay_summary( + session_id=session_id_four, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago + relativedelta(seconds=20), + last_timestamp=self.an_hour_ago + relativedelta(seconds=25), + first_url="https://on-first-received-event.com", + ) + produce_replay_summary( + session_id=session_id_four, + team_id=self.team.pk, + 
first_timestamp=self.an_hour_ago + relativedelta(seconds=10), + last_timestamp=self.an_hour_ago + relativedelta(seconds=25), + first_url="https://on-second-received-event-but-actually-first.com", + ) + + session_recordings, more_recordings_available, _ = self._filter_recordings_by() + + assert sorted( + [{"session_id": r["session_id"], "first_url": r["first_url"]} for r in session_recordings], + key=lambda x: x["session_id"], + ) == sorted( + [ + { + "session_id": session_id_one, + "first_url": "https://on-first-event.com", + }, + { + "session_id": session_id_two, + "first_url": "https://first-is-on-second-event.com", + }, + { + "session_id": session_id_three, + "first_url": None, + }, + { + "session_id": session_id_four, + "first_url": "https://on-second-received-event-but-actually-first.com", + }, + ], + # mypy unhappy about this lambda when first_url can be None 🤷️ + key=lambda x: x["session_id"], # type: ignore + ) + + def test_recordings_dont_leak_data_between_teams(self): + another_team = Team.objects.create(organization=self.organization) + user = "test_recordings_dont_leak_data_between_teams-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + Person.objects.create(team=another_team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = f"test_recordings_dont_leak_data_between_teams-1-{str(uuid4())}" + session_id_two = f"test_recordings_dont_leak_data_between_teams-2-{str(uuid4())}" + + produce_replay_summary( + session_id=session_id_one, + team_id=another_team.pk, + distinct_id=user, + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=20), + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=20 * 1000 * 0.5, # 50% of the total expected duration + ) + + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.pk, + distinct_id=user, + first_timestamp=self.an_hour_ago, + last_timestamp=self.an_hour_ago + relativedelta(seconds=20), + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=20 * 1000 * 0.5, # 50% of the total expected duration + ) + + (session_recordings, _, _) = self._filter_recordings_by() + + assert [{"session": r["session_id"], "user": r["distinct_id"]} for r in session_recordings] == [ + {"session": session_id_two, "user": user} + ] + + @snapshot_clickhouse_queries + def test_event_filter(self): + user = "test_event_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + session_id_one = f"test_event_filter-{str(uuid4())}" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + user, + self.an_hour_ago, + properties={"$session_id": session_id_one, "$window_id": str(uuid4())}, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$autocapture", + "type": "events", + "order": 0, + "name": "$autocapture", + } + ] + } + ) + assert session_recordings == [] + + @snapshot_clickhouse_queries 
+ def test_event_filter_has_ttl_applied_too(self): + user = "test_event_filter_has_ttl_applied_too-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + session_id_one = f"test_event_filter_has_ttl_applied_too-{str(uuid4())}" + + # this is artificially incorrect data, the session events are within TTL + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + # but the page view event is outside TTL + self.create_event( + user, + self.an_hour_ago - relativedelta(days=SessionRecordingListFromQuery.SESSION_RECORDINGS_DEFAULT_LIMIT + 1), + properties={"$session_id": session_id_one, "$window_id": str(uuid4())}, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + } + ) + assert len(session_recordings) == 0 + + (session_recordings, _, _) = self._filter_recordings_by({}) + # without an event filter the recording is present, showing that the TTL was applied to the events table too + # we want this to limit the amount of event data we query + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + @snapshot_clickhouse_queries + def test_ttl_days(self): + assert ttl_days(self.team) == 21 + + with self.is_cloud(True): + # Far enough in the future from `days_since_blob_ingestion` but not paid + with freeze_time("2023-09-01T12:00:01Z"): + assert ttl_days(self.team) == 30 + + self.team.organization.available_product_features = [ + {"key": AvailableFeature.RECORDINGS_PLAYLISTS, "name": AvailableFeature.RECORDINGS_PLAYLISTS} + ] + + # Far enough in the future from `days_since_blob_ingestion` but paid + with freeze_time("2023-12-01T12:00:01Z"): + assert ttl_days(self.team) == 90 + + # Not far enough in the future from `days_since_blob_ingestion` + with freeze_time("2023-09-05T12:00:01Z"): + assert ttl_days(self.team) == 35 + + @snapshot_clickhouse_queries + def test_listing_ignores_future_replays(self): + with freeze_time("2023-08-29T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="29th Aug") + + with freeze_time("2023-09-01T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="1st-sep") + + with freeze_time("2023-09-02T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="2nd-sep") + + with freeze_time("2023-09-03T12:00:01Z"): + produce_replay_summary(team_id=self.team.id, session_id="3rd-sep") + + with freeze_time("2023-08-30T12:00:01Z"): + recordings = self._filter_recordings_by() + + # recordings in the future don't show + assert [s["session_id"] for s in recordings.results] == ["29th Aug"] + + @snapshot_clickhouse_queries + def test_filter_on_session_ids(self): + user = "test_session_ids-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + first_session_id = str(uuid4()) + second_session_id = str(uuid4()) + third_session_id = str(uuid4()) + + produce_replay_summary( + session_id=first_session_id, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(minutes=5)), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=59000, + ) + + produce_replay_summary( + session_id=second_session_id, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + 
last_timestamp=(self.an_hour_ago + relativedelta(minutes=1)), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=61000, + ) + + produce_replay_summary( + session_id=third_session_id, + team_id=self.team.pk, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(minutes=10)), + distinct_id=user, + first_url="https://example.io/home", + click_count=0, + keypress_count=0, + mouse_activity_count=0, + active_milliseconds=0, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "session_ids": [first_session_id], + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == first_session_id + + (session_recordings, _, _) = self._filter_recordings_by( + { + "session_ids": [first_session_id, second_session_id], + } + ) + + assert sorted([s["session_id"] for s in session_recordings]) == sorted( + [ + first_session_id, + second_session_id, + ] + ) + + @snapshot_clickhouse_queries + def test_event_filter_with_active_sessions( + self, + ): + user = "test_basic_query-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_total_is_61 = f"test_basic_query_active_sessions-total-{str(uuid4())}" + session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}" + + self.create_event( + user, + self.an_hour_ago, + properties={ + "$session_id": session_id_total_is_61, + "$window_id": str(uuid4()), + }, + ) + produce_replay_summary( + session_id=session_id_total_is_61, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago.isoformat().replace("T", " "), + last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)).isoformat().replace("T", " "), + distinct_id=user, + first_url="https://example.io/home", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=59000, + ) + + self.create_event( + user, + self.an_hour_ago, + properties={ + "$session_id": session_id_active_is_61, + "$window_id": str(uuid4()), + }, + ) + produce_replay_summary( + session_id=session_id_active_is_61, + team_id=self.team.pk, + # can CH handle a timestamp with no T + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=59)), + distinct_id=user, + first_url="https://a-different-url.com", + click_count=2, + keypress_count=2, + mouse_activity_count=2, + active_milliseconds=61000, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]', + } + ) + + assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ + (session_id_total_is_61, 61, 59.0) + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "having_predicates": '[{"type":"recording","key":"active_seconds","value":60,"operator":"gt"}]', + } + ) + + assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ + (session_id_active_is_61, 59, 61.0) + ] + + @also_test_with_materialized_columns(["$current_url", "$browser"]) + @snapshot_clickhouse_queries + def test_event_filter_with_properties(self): + user = 
"test_event_filter_with_properties-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + session_id_one = f"test_event_filter_with_properties-{str(uuid4())}" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + user, + self.an_hour_ago, + properties={ + "$browser": "Chrome", + "$session_id": session_id_one, + "$window_id": str(uuid4()), + }, + ) + self.create_event( + user, + self.an_hour_ago, + event_name="a_different_event", + properties={ + "$browser": "Safari", + "$session_id": session_id_one, + "$window_id": str(uuid4()), + }, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert session_recordings == [] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "a_different_event", + "type": "events", + "order": 0, + "name": "a_different_event", + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert len(session_recordings) == 0 + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "a_different_event", + "type": "events", + "order": 0, + "name": "a_different_event", + "properties": [ + { + "key": "$browser", + "value": ["Safari"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + @snapshot_clickhouse_queries + def test_multiple_event_filters(self): + session_id = f"test_multiple_event_filters-{str(uuid4())}" + user = "test_multiple_event_filters-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + + self.create_event( + user, + self.an_hour_ago, + properties={"$session_id": session_id, "$window_id": "1", "foo": "bar"}, + ) + self.create_event( + user, + self.an_hour_ago, + properties={"$session_id": session_id, "$window_id": "1", "bar": "foo"}, + ) + self.create_event( + user, + self.an_hour_ago, + properties={"$session_id": session_id, "$window_id": "1", "bar": "foo"}, + event_name="new-event", + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "new-event", + "type": "events", + "order": 0, + 
"name": "new-event", + }, + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "new-event2", + "type": "events", + "order": 0, + "name": "new-event2", + }, + ] + } + ) + assert session_recordings == [] + + # it uses hasAny instead of hasAll because of the OR filter + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "new-event2", + "type": "events", + "order": 0, + "name": "new-event2", + }, + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 1 + + # two events with the same name + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "name": "$pageview", + "properties": [{"key": "foo", "value": ["bar"], "operator": "exact", "type": "event"}], + }, + { + "id": "$pageview", + "type": "events", + "name": "$pageview", + "properties": [{"key": "bar", "value": ["foo"], "operator": "exact", "type": "event"}], + }, + ], + "operand": "AND", + } + ) + assert len(session_recordings) == 1 + + # two events with different names + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "name": "$pageview", + "properties": [{"key": "foo", "value": ["bar"], "operator": "exact", "type": "event"}], + }, + { + "id": "new-event", + "type": "events", + "name": "new-event", + "properties": [{"key": "foo", "value": ["bar"], "operator": "exact", "type": "event"}], + }, + ], + "operand": "AND", + } + ) + assert len(session_recordings) == 0 + + # two events with different names + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "name": "$pageview", + "properties": [{"key": "foo", "value": ["bar"], "operator": "exact", "type": "event"}], + }, + { + "id": "new-event", + "type": "events", + "name": "new-event", + "properties": [{"key": "foo", "value": ["bar"], "operator": "exact", "type": "event"}], + }, + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 1 + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(["$session_id", "$browser"], person_properties=["email"]) + @freeze_time("2023-01-04") + def test_action_filter(self): + user = "test_action_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + session_id_one = f"test_action_filter-session-one" + window_id = "test_action_filter-window-id" + action_with_properties = self.create_action( + "custom-event", + properties=[ + {"key": "$browser", "value": "Firefox"}, + {"key": "$session_id", "value": session_id_one}, + {"key": "$window_id", "value": window_id}, + ], + ) + action_without_properties = self.create_action( + name="custom-event", + properties=[ + {"key": "$session_id", "value": session_id_one}, + {"key": "$window_id", "value": window_id}, + ], + ) + + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + user, + self.an_hour_ago, + event_name="custom-event", + properties={ + "$browser": "Chrome", + "$session_id": session_id_one, + "$window_id": window_id, + }, + ) + produce_replay_summary( + 
distinct_id=user, + session_id=session_id_one, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "actions": [ + { + "id": action_with_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ] + } + ) + assert session_recordings == [] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "actions": [ + { + "id": action_without_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + # Adding properties to an action + (session_recordings, _, _) = self._filter_recordings_by( + { + "actions": [ + { + "id": action_without_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert session_recordings == [] + + # Adding matching properties to an action + (session_recordings, _, _) = self._filter_recordings_by( + { + "actions": [ + { + "id": action_without_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + def test_all_sessions_recording_object_keys_with_entity_filter(self): + user = "test_all_sessions_recording_object_keys_with_entity_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + session_id = f"test_all_sessions_recording_object_keys_with_entity_filter-{str(uuid4())}" + window_id = str(uuid4()) + + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=60)), + team_id=self.team.id, + first_url="https://recieved-out-of-order.com/second", + ) + self.create_event( + user, + self.an_hour_ago, + properties={"$session_id": session_id, "$window_id": window_id}, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + first_url="https://recieved-out-of-order.com/first", + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + } + ) + + assert session_recordings == [ + { + "activity_score": 0, + "session_id": session_id, + "distinct_id": user, + "duration": 60, + "start_time": self.an_hour_ago, + "end_time": self.an_hour_ago + relativedelta(seconds=60), + "active_seconds": 0.0, + "click_count": 0, + "first_url": "https://recieved-out-of-order.com/first", + "inactive_seconds": 60.0, + "keypress_count": 0, + "mouse_activity_count": 0, + "team_id": self.team.id, + "console_log_count": 0, + "console_warn_count": 0, + "console_error_count": 0, + "ongoing": 1, + } + ] + + @snapshot_clickhouse_queries + def test_duration_filter(self): + user = "test_duration_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = "session one is 29 seconds long" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + 
first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=29)), + team_id=self.team.id, + ) + + session_id_two = "session two is 61 seconds long" + produce_replay_summary( + distinct_id=user, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=61)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + {"having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]'} + ) + assert [r["session_id"] for r in session_recordings] == [session_id_two] + + (session_recordings, _, _) = self._filter_recordings_by( + {"having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"lt"}]'} + ) + assert [r["session_id"] for r in session_recordings] == [session_id_one] + + @snapshot_clickhouse_queries + def test_operand_or_person_filters(self): + user = "test_operand_or_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "test@posthog.com"}) + + second_user = "test_operand_or_filter-second_user" + Person.objects.create(team=self.team, distinct_ids=[second_user], properties={"email": "david@posthog.com"}) + + session_id_one = "session_id_one" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + session_id_two = "session_id_two" + produce_replay_summary( + distinct_id=second_user, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "email", + "value": ["test@posthog.com"], + "operator": "exact", + "type": "person", + }, + { + "key": "email", + "value": ["david@posthog.com"], + "operator": "exact", + "type": "person", + }, + ], + "operand": "AND", + } + ) + assert len(session_recordings) == 0 + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "email", + "value": ["test@posthog.com"], + "operator": "exact", + "type": "person", + }, + { + "key": "email", + "value": ["david@posthog.com"], + "operator": "exact", + "type": "person", + }, + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 2 + assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_one, session_id_two]) + + @snapshot_clickhouse_queries + def test_operand_or_event_filters(self): + user = "test_operand_or_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "test@posthog.com"}) + + second_user = "test_operand_or_filter-second_user" + Person.objects.create(team=self.team, distinct_ids=[second_user], properties={"email": "david@posthog.com"}) + + session_id_one = "session_id_one" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + self.create_event( + user, + self.an_hour_ago + relativedelta(seconds=10), + properties={"$session_id": session_id_one}, + ) + + session_id_two = "session_id_two" + produce_replay_summary( + distinct_id=second_user, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, 
+ ) + self.create_event( + user, + self.an_hour_ago + relativedelta(seconds=10), + event_name="custom_event", + properties={"$session_id": session_id_two}, + ) + + session_id_three = "session_id_three" + produce_replay_summary( + distinct_id=second_user, + session_id=session_id_three, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "custom_event", + "type": "events", + "order": 0, + "name": "custom_event", + }, + ], + "operand": "AND", + } + ) + assert len(session_recordings) == 0 + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "custom_event", + "type": "events", + "order": 0, + "name": "custom_event", + }, + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 2 + assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) + + @parameterized.expand( + [ + # Case 1: Neither has WARN and message "random" + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 0, + [], + ), + # Case 2: AND only matches one recording + ( + '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["both_log_filters"], + ), + # Case 3: Only one is WARN level + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["one_log_filter"], + ), + # Case 4: Only one has message "random" + ( + '[{"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "AND", + 1, + ["both_log_filters"], + ), + # Case 5: OR matches both + ( + '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "random", "operator": "exact", "type": "log_entry"}]', + "OR", + 2, + ["both_log_filters", "one_log_filter"], + ), + ] + ) + @snapshot_clickhouse_queries + def test_operand_or_filters( + self, + console_log_filters: str, + operand: Literal["AND", "OR"], + expected_count: int, + expected_session_ids: list[str], + ) -> None: + user = "test_operand_or_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_with_both_log_filters = "both_log_filters" + produce_replay_summary( + distinct_id="user", + session_id=session_with_both_log_filters, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_log_count=1, + log_messages={"info": ["random"]}, + ) + + session_with_one_log_filter = "one_log_filter" + produce_replay_summary( + distinct_id="user", + session_id=session_with_one_log_filter, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_warn_count=1, + log_messages={"warn": ["warn"]}, + ) + + session_recordings, _, _ = self._filter_recordings_by( + {"console_log_filters": console_log_filters, "operand": operand} + ) + + assert len(session_recordings) == expected_count + assert sorted([rec["session_id"] for rec in session_recordings]) == sorted(expected_session_ids) + + @snapshot_clickhouse_queries + def test_operand_or_mandatory_filters(self): + user 
= "test_operand_or_filter-user" + person = Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + second_user = "test_operand_or_filter-second_user" + second_person = Person.objects.create(team=self.team, distinct_ids=[second_user], properties={"email": "bla"}) + + session_id_one = "session_id_one" + produce_replay_summary( + distinct_id=user, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + self.create_event( + user, + self.an_hour_ago + relativedelta(seconds=10), + properties={"$session_id": session_id_one}, + ) + + session_id_two = "session_id_two" + produce_replay_summary( + distinct_id=second_user, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + last_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + # person or event filter -> person matches, event matches -> returns session + (session_recordings, _, _) = self._filter_recordings_by( + { + "person_uuid": str(person.uuid), + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + # person or event filter -> person does not match, event matches -> does not return session + (session_recordings, _, _) = self._filter_recordings_by( + { + "person_uuid": str(second_person.uuid), + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 0 + + # session_id or event filter -> person matches, event matches -> returns session + (session_recordings, _, _) = self._filter_recordings_by( + { + "session_ids": [session_id_one], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id_one + + # session_id or event filter -> person does not match, event matches -> does not return session + (session_recordings, _, _) = self._filter_recordings_by( + { + "session_ids": [session_id_two], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "operand": "OR", + } + ) + assert len(session_recordings) == 0 + + @snapshot_clickhouse_queries + def test_date_from_filter(self): + user = "test_date_from_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + produce_replay_summary( + distinct_id=user, + session_id="three days before base time", + first_timestamp=(self.an_hour_ago - relativedelta(days=3, seconds=100)), + last_timestamp=(self.an_hour_ago - relativedelta(days=3)), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id="two days before base time", + first_timestamp=(self.an_hour_ago - relativedelta(days=2, seconds=100)), + last_timestamp=(self.an_hour_ago - relativedelta(days=2)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by({"date_from": self.an_hour_ago.strftime("%Y-%m-%d")}) + assert session_recordings == [] + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_from": (self.an_hour_ago - relativedelta(days=2)).strftime("%Y-%m-%d")} + ) + assert len(session_recordings) == 1 + assert 
session_recordings[0]["session_id"] == "two days before base time" + + @snapshot_clickhouse_queries + def test_date_from_filter_cannot_search_before_ttl(self): + with freeze_time(self.an_hour_ago): + user = "test_date_from_filter_cannot_search_before_ttl-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + produce_replay_summary( + distinct_id=user, + session_id="storage is past ttl", + first_timestamp=(self.an_hour_ago - relativedelta(days=22)), + # an illegally long session but it started 22 days ago + last_timestamp=(self.an_hour_ago - relativedelta(days=3)), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id="storage is not past ttl", + first_timestamp=(self.an_hour_ago - relativedelta(days=19)), + last_timestamp=(self.an_hour_ago - relativedelta(days=2)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_from": (self.an_hour_ago - relativedelta(days=20)).strftime("%Y-%m-%d")} + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == "storage is not past ttl" + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_from": (self.an_hour_ago - relativedelta(days=21)).strftime("%Y-%m-%d")} + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == "storage is not past ttl" + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_from": (self.an_hour_ago - relativedelta(days=22)).strftime("%Y-%m-%d")} + ) + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == "storage is not past ttl" + + @snapshot_clickhouse_queries + def test_date_to_filter(self): + user = "test_date_to_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + produce_replay_summary( + distinct_id=user, + session_id="three days before base time", + first_timestamp=(self.an_hour_ago - relativedelta(days=3, seconds=100)), + last_timestamp=(self.an_hour_ago - relativedelta(days=3)), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id="two days before base time", + first_timestamp=(self.an_hour_ago - relativedelta(days=2, seconds=100)), + last_timestamp=(self.an_hour_ago - relativedelta(days=2)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_to": (self.an_hour_ago - relativedelta(days=4)).strftime("%Y-%m-%d")} + ) + assert session_recordings == [] + + (session_recordings, _, _) = self._filter_recordings_by( + {"date_to": (self.an_hour_ago - relativedelta(days=3)).strftime("%Y-%m-%d")} + ) + + assert len(session_recordings) == 1 + assert [s["session_id"] for s in session_recordings] == ["three days before base time"] + + def test_recording_that_spans_time_bounds(self): + user = "test_recording_that_spans_time_bounds-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + day_line = datetime(2021, 11, 5) + session_id = f"session-one-{user}" + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=(day_line - relativedelta(hours=3)), + last_timestamp=(day_line + relativedelta(hours=3)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "date_to": day_line.strftime("%Y-%m-%d"), + "date_from": (day_line - relativedelta(days=10)).strftime("%Y-%m-%d"), + } + ) + + assert len(session_recordings) == 1 + assert 
session_recordings[0]["session_id"] == session_id + assert session_recordings[0]["duration"] == 6 * 60 * 60 + + @snapshot_clickhouse_queries + def test_person_id_filter(self): + three_user_ids = [str(uuid4()) for _ in range(3)] + session_id_one = f"test_person_id_filter-{str(uuid4())}" + session_id_two = f"test_person_id_filter-{str(uuid4())}" + p = Person.objects.create( + team=self.team, + distinct_ids=[three_user_ids[0], three_user_ids[1]], + properties={"email": "bla"}, + ) + produce_replay_summary( + distinct_id=three_user_ids[0], + session_id=session_id_one, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=three_user_ids[1], + session_id=session_id_two, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=three_user_ids[2], + session_id=str(uuid4()), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by({"person_uuid": str(p.uuid)}) + assert sorted([r["session_id"] for r in session_recordings]) == sorted([session_id_two, session_id_one]) + + @snapshot_clickhouse_queries + def test_all_filters_at_once(self): + three_user_ids = [str(uuid4()) for _ in range(3)] + target_session_id = f"test_all_filters_at_once-{str(uuid4())}" + + p = Person.objects.create( + team=self.team, + distinct_ids=[three_user_ids[0], three_user_ids[1]], + properties={"email": "bla"}, + ) + custom_event_action = self.create_action(name="custom-event") + + produce_replay_summary( + distinct_id=three_user_ids[0], + session_id=target_session_id, + first_timestamp=(self.an_hour_ago - relativedelta(days=3)), + team_id=self.team.id, + ) + produce_replay_summary( + # does not match because of user distinct id + distinct_id=three_user_ids[2], + session_id=target_session_id, + first_timestamp=(self.an_hour_ago - relativedelta(days=3)), + team_id=self.team.id, + ) + self.create_event( + three_user_ids[0], + self.an_hour_ago - relativedelta(days=3), + properties={"$session_id": target_session_id}, + ) + self.create_event( + three_user_ids[0], + self.an_hour_ago - relativedelta(days=3), + event_name="custom-event", + properties={"$browser": "Chrome", "$session_id": target_session_id}, + ) + produce_replay_summary( + distinct_id=three_user_ids[1], + session_id=target_session_id, + first_timestamp=(self.an_hour_ago - relativedelta(days=3) + relativedelta(hours=6)), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=three_user_ids[1], + # does not match because of session id + session_id=str(uuid4()), + first_timestamp=(self.an_hour_ago - relativedelta(days=3) + relativedelta(hours=6)), + team_id=self.team.id, + ) + + flush_persons_and_events() + + (session_recordings, _, _) = self._filter_recordings_by( + { + "person_uuid": str(p.uuid), + "date_to": (self.an_hour_ago + relativedelta(days=3)).strftime("%Y-%m-%d"), + "date_from": (self.an_hour_ago - relativedelta(days=10)).strftime("%Y-%m-%d"), + "having_predicates": '[{"type":"recording","key":"duration","value":60,"operator":"gt"}]', + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "actions": [ + { + "id": custom_event_action.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ], + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == target_session_id + + def test_teams_dont_leak_event_filter(self): + user = "test_teams_dont_leak_event_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + another_team = 
Team.objects.create(organization=self.organization) + + session_id = f"test_teams_dont_leak_event_filter-{str(uuid4())}" + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event(1, self.an_hour_ago + relativedelta(seconds=15), team=another_team) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + } + ) + assert session_recordings == [] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["email"]) + def test_filter_with_person_properties_exact(self): + session_id_one, session_id_two = self._two_sessions_two_persons( + "test_filter_with_person_properties_exact", + session_one_person_properties={"email": "bla@gmail.com"}, + session_two_person_properties={"email": "bla2@hotmail.com"}, + ) + + query_results: SessionRecordingQueryResult = self._filter_recordings_by( + { + "properties": [ + { + "key": "email", + "value": ["bla@gmail.com"], + "operator": "exact", + "type": "person", + } + ] + } + ) + + assert [x["session_id"] for x in query_results.results] == [session_id_one] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["email"]) + def test_filter_with_person_properties_not_contains(self): + session_id_one, session_id_two = self._two_sessions_two_persons( + "test_filter_with_person_properties_not_contains", + session_one_person_properties={"email": "bla@gmail.com"}, + session_two_person_properties={"email": "bla2@hotmail.com"}, + ) + + query_results: SessionRecordingQueryResult = self._filter_recordings_by( + {"properties": [{"key": "email", "value": "gmail.com", "operator": "not_icontains", "type": "person"}]} + ) + + assert [x["session_id"] for x in query_results.results] == [session_id_two] + + def _two_sessions_two_persons( + self, label: str, session_one_person_properties: dict, session_two_person_properties: dict + ) -> tuple[str, str]: + sessions = [] + + for i in range(2): + user = f"{label}-user-{i}" + session = f"{label}-session-{i}" + sessions.append(session) + + Person.objects.create( + team=self.team, + distinct_ids=[user], + properties=session_one_person_properties if i == 0 else session_two_person_properties, + ) + + produce_replay_summary( + distinct_id=user, + session_id=session, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id=session, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=30)), + team_id=self.team.id, + ) + + return sessions[0], sessions[1] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["$some_prop"]) + def test_filter_with_cohort_properties(self): + with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): + with freeze_time("2021-08-21T20:00:00.000Z"): + user_one = "test_filter_with_cohort_properties-user" + user_two = "test_filter_with_cohort_properties-user2" + session_id_one = f"test_filter_with_cohort_properties-1-{str(uuid4())}" + session_id_two = f"test_filter_with_cohort_properties-2-{str(uuid4())}" + + Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=[user_two], + 
properties={"email": "bla2", "$some_prop": "some_val"}, + ) + cohort = Cohort.objects.create( + team=self.team, + name="cohort1", + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], + ) + cohort.calculate_people_ch(pending_version=0) + + produce_replay_summary( + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + # self.create_event(user_one, self.base_time, team=self.team) + produce_replay_summary( + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + # self.create_event(user_two, self.base_time, team=self.team) + produce_replay_summary( + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": "in", + "type": "cohort", + } + ] + } + ) + + assert [x["session_id"] for x in session_recordings] == [session_id_two] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["$some_prop"]) + def test_filter_with_static_and_dynamic_cohort_properties(self): + with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): + with freeze_time("2021-08-21T20:00:00.000Z"): + user_one = "test_filter_with_cohort_properties-user-in-static-cohort" + user_two = "test_filter_with_cohort_properties-user2-in-dynamic-cohort" + user_three = "test_filter_with_cohort_properties-user3-in-both-cohort" + + session_id_one = ( + f"in-static-cohort-test_filter_with_static_and_dynamic_cohort_properties-1-{str(uuid4())}" + ) + session_id_two = ( + f"in-dynamic-cohort-test_filter_with_static_and_dynamic_cohort_properties-2-{str(uuid4())}" + ) + session_id_three = ( + f"in-both-cohort-test_filter_with_static_and_dynamic_cohort_properties-3-{str(uuid4())}" + ) + + Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "in@static.cohort"}) + Person.objects.create( + team=self.team, + distinct_ids=[user_two], + properties={"email": "in@dynamic.cohort", "$some_prop": "some_val"}, + ) + Person.objects.create( + team=self.team, + distinct_ids=[user_three], + properties={"email": "in@both.cohorts", "$some_prop": "some_val"}, + ) + + dynamic_cohort = Cohort.objects.create( + team=self.team, + name="cohort1", + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], + ) + + static_cohort = Cohort.objects.create(team=self.team, name="a static cohort", groups=[], is_static=True) + static_cohort.insert_users_by_list([user_one, user_three]) + + dynamic_cohort.calculate_people_ch(pending_version=0) + static_cohort.calculate_people_ch(pending_version=0) + + replay_summaries = [ + (user_one, session_id_one), + (user_two, session_id_two), + (user_three, session_id_three), + ] + for distinct_id, session_id in replay_summaries: + produce_replay_summary( + distinct_id=distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + 
(session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": static_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == sorted( + [session_id_one, session_id_three] + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": dynamic_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == sorted( + [session_id_two, session_id_three] + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": dynamic_cohort.pk, + "operator": "in", + "type": "cohort", + }, + { + "key": "id", + "value": static_cohort.pk, + "operator": "in", + "type": "cohort", + }, + ] + } + ) + + assert sorted([x["session_id"] for x in session_recordings]) == [session_id_three] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(person_properties=["$some_prop"]) + def test_filter_with_events_and_cohorts(self): + with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): + with freeze_time("2021-08-21T20:00:00.000Z"): + user_one = "test_filter_with_events_and_cohorts-user" + user_two = "test_filter_with_events_and_cohorts-user2" + session_id_one = f"test_filter_with_events_and_cohorts-1-{str(uuid4())}" + session_id_two = f"test_filter_with_events_and_cohorts-2-{str(uuid4())}" + + Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=[user_two], + properties={"email": "bla2", "$some_prop": "some_val"}, + ) + cohort = Cohort.objects.create( + team=self.team, + name="cohort1", + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], + ) + cohort.calculate_people_ch(pending_version=0) + + produce_replay_summary( + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + user_one, + self.an_hour_ago, + team=self.team, + event_name="custom_event", + properties={"$session_id": session_id_one}, + ) + produce_replay_summary( + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + user_two, + self.an_hour_ago, + team=self.team, + event_name="custom_event", + properties={"$session_id": session_id_two}, + ) + produce_replay_summary( + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # has to be in the cohort and pageview has to be in the events + # test data has one user in the cohort but no pageviews + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": "in", + "type": "cohort", + } + ], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + } + ) + + assert [s["session_id"] for s in session_recordings] == [] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": "in", + "type": "cohort", + } + ], + 
"events": [ + { + "id": "custom_event", + "type": "events", + "order": 0, + "name": "custom_event", + } + ], + } + ) + + assert [x["session_id"] for x in session_recordings] == [session_id_two] + + @snapshot_clickhouse_queries + @also_test_with_materialized_columns(["$current_url"]) + def test_event_filter_with_matching_on_session_id(self): + user_distinct_id = "test_event_filter_with_matching_on_session_id-user" + Person.objects.create(team=self.team, distinct_ids=[user_distinct_id], properties={"email": "bla"}) + session_id = f"test_event_filter_with_matching_on_session_id-1-{str(uuid4())}" + + self.create_event( + user_distinct_id, + self.an_hour_ago, + event_name="$pageview", + properties={"$session_id": session_id}, + ) + self.create_event( + user_distinct_id, + self.an_hour_ago, + event_name="$autocapture", + properties={"$session_id": str(uuid4())}, + ) + + produce_replay_summary( + distinct_id=user_distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user_distinct_id, + session_id=session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$autocapture", + "type": "events", + "order": 0, + "name": "$autocapture", + } + ] + } + ) + assert session_recordings == [] + + @also_test_with_materialized_columns(event_properties=["$current_url", "$browser"], person_properties=["email"]) + @snapshot_clickhouse_queries + def test_event_filter_with_hogql_properties(self): + user = "test_event_filter_with_hogql_properties-user" + + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id = f"test_event_filter_with_hogql_properties-1-{str(uuid4())}" + self.create_event( + user, + self.an_hour_ago, + properties={ + "$browser": "Chrome", + "$session_id": session_id, + "$window_id": str(uuid4()), + }, + ) + + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, + ], + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [{"key": "properties.$browser == 'Firefox'", "type": "hogql"}], + } + ] + } + ) + + assert session_recordings == [] + + @snapshot_clickhouse_queries + def test_event_filter_with_hogql_person_properties(self): + user = "test_event_filter_with_hogql_properties-user" + + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id = f"test_event_filter_with_hogql_properties-1-{str(uuid4())}" + 
self.create_event( + user, + self.an_hour_ago, + properties={ + "$browser": "Chrome", + "$session_id": session_id, + "$window_id": str(uuid4()), + }, + ) + + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id=user, + session_id=session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + { + "key": "person.properties.email == 'bla'", + "type": "hogql", + }, + ], + } + ] + } + ) + + assert len(session_recordings) == 1 + assert session_recordings[0]["session_id"] == session_id + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + { + "key": "person.properties.email == 'something else'", + "type": "hogql", + }, + ], + } + ] + } + ) + + assert session_recordings == [] + + @also_test_with_materialized_columns(["$current_url", "$browser"]) + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_any_event_filter_with_properties(self): + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + page_view_session_id = f"pageview-session-{str(uuid4())}" + my_custom_event_session_id = f"my-custom-event-session-{str(uuid4())}" + non_matching__event_session_id = f"non-matching-event-session-{str(uuid4())}" + + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$browser": "Chrome", + "$session_id": page_view_session_id, + "$window_id": "1", + }, + event_name="$pageview", + ) + + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$browser": "Chrome", + "$session_id": my_custom_event_session_id, + "$window_id": "1", + }, + event_name="my-custom-event", + ) + + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$browser": "Safari", + "$session_id": non_matching__event_session_id, + "$window_id": "1", + }, + event_name="my-non-matching-event", + ) + + produce_replay_summary( + distinct_id="user", + session_id=page_view_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id="user", + session_id=my_custom_event_session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + produce_replay_summary( + distinct_id="user", + session_id=non_matching__event_session_id, + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + # an id of null means "match any event" + "id": None, + "type": "events", + "order": 0, + "name": "All events", + "properties": [], + } + ] + } + ) + + assert sorted( + [sr["session_id"] for sr in session_recordings], + ) == [ + my_custom_event_session_id, + non_matching__event_session_id, + page_view_session_id, + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + # an id of null means "match any event" + "id": None, + "type": "events", + "order": 0, + "name": "All events", + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + + assert sorted( + [sr["session_id"] for sr in 
session_recordings], + ) == [ + my_custom_event_session_id, + page_view_session_id, + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": None, + "type": "events", + "order": 0, + "name": "All events", + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + "operator": "exact", + "type": "event", + } + ], + } + ] + } + ) + assert session_recordings == [] + + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_filter_for_recordings_with_console_logs(self): + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + with_logs_session_id = f"with-logs-session-{str(uuid4())}" + without_logs_session_id = f"no-logs-session-{str(uuid4())}" + + produce_replay_summary( + distinct_id="user", + session_id=with_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_log_count=4, + log_messages={ + "info": [ + "info", + "info", + "info", + ], + }, + ) + + produce_replay_summary( + distinct_id="user", + session_id=without_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + + # (session_recordings, _, _) = self._filter_recordings_by({"console_logs": ["info"]}) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + actual = sorted( + [(sr["session_id"], sr["console_log_count"]) for sr in session_recordings], + key=lambda x: x[0], + ) + + assert actual == [ + (with_logs_session_id, 4), + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + assert session_recordings == [] + + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_filter_for_recordings_with_console_warns(self): + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + with_logs_session_id = f"with-logs-session-{str(uuid4())}" + without_logs_session_id = f"no-logs-session-{str(uuid4())}" + + produce_replay_summary( + distinct_id="user", + session_id=with_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_warn_count=4, + log_messages={ + "warn": [ + "warn", + "warn", + "warn", + "warn", + ], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id=without_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert sorted( + [(sr["session_id"], sr["console_warn_count"]) for sr in session_recordings], + key=lambda x: x[0], + ) == [ + (with_logs_session_id, 4), + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert session_recordings == [] + + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_filter_for_recordings_with_console_errors(self): + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + with_logs_session_id = f"with-logs-session-{str(uuid4())}" + without_logs_session_id = 
f"no-logs-session-{str(uuid4())}" + + produce_replay_summary( + distinct_id="user", + session_id=with_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + log_messages={ + "error": [ + "error", + "error", + "error", + "error", + ], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id=without_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["error"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert sorted( + [(sr["session_id"], sr["console_error_count"]) for sr in session_recordings], + key=lambda x: x[0], + ) == [ + (with_logs_session_id, 4), + ] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert session_recordings == [] + + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_filter_for_recordings_with_mixed_console_counts(self): + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + with_logs_session_id = f"with-logs-session-{str(uuid4())}" + with_warns_session_id = f"with-warns-session-{str(uuid4())}" + with_errors_session_id = f"with-errors-session-{str(uuid4())}" + with_two_session_id = f"with-two-session-{str(uuid4())}" + + produce_replay_summary( + distinct_id="user", + session_id=with_logs_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_log_count=4, + log_messages={ + "info": [ + "info", + "info", + "info", + "info", + ], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id=with_warns_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_warn_count=4, + log_messages={ + "warn": [ + "warn", + "warn", + "warn", + "warn", + ], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id=with_errors_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + log_messages={ + "error": [ + "error", + "error", + "error", + "error", + ], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id=with_two_session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + console_log_count=3, + log_messages={ + "error": [ + "error", + "error", + "error", + "error", + ], + "info": [ + "info", + "info", + "info", + ], + }, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( + [ + with_errors_session_id, + with_two_session_id, + with_warns_session_id, + ] + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "console_log_filters": '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}]', + "operand": "AND", + } + ) + + assert sorted([sr["session_id"] for sr in session_recordings]) == sorted( + [ + with_two_session_id, + with_logs_session_id, + ] + ) + + @parameterized.expand( + [ + # Case 1: OR operand, message 4 matches in warn and error + ( + '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 4", 
"operator": "icontains", "type": "log_entry"}]', + "OR", + ["with-errors-session", "with-two-session", "with-warns-session", "with-logs-session"], + ), + # Case 2: AND operand, message 4 matches in log, warn, and error + ( + '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 4", "operator": "icontains", "type": "log_entry"}]', + "AND", + ["with-errors-session", "with-two-session", "with-warns-session"], + ), + # Case 2: AND operand, message 5 matches only in warn + ( + '[{"key": "level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "AND", + ["with-warns-session"], + ), + # Case 3: AND operand, message 5 does not match log level "info" + ( + '[{"key": "level", "value": ["info"], "operator": "exact", "type": "log_entry"}, {"key": "message", "value": "message 5", "operator": "icontains", "type": "log_entry"}]', + "AND", + [], + ), + ] + ) + @snapshot_clickhouse_queries + @freeze_time("2021-01-21T20:00:00.000Z") + def test_filter_for_recordings_by_console_text( + self, + console_log_filters: str, + operand: Literal["AND", "OR"], + expected_session_ids: list[str], + ) -> None: + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + # Create sessions + produce_replay_summary( + distinct_id="user", + session_id="with-logs-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_log_count=4, + log_messages={ + "info": [ + "log message 1", + "log message 2", + "log message 3", + "log message 4", + ] + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="with-warns-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_warn_count=5, + log_messages={ + "warn": [ + "warn message 1", + "warn message 2", + "warn message 3", + "warn message 4", + "warn message 5", + ] + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="with-errors-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + log_messages={ + "error": [ + "error message 1", + "error message 2", + "error message 3", + "error message 4", + ] + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="with-two-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + console_log_count=3, + log_messages={ + "error": [ + "error message 1", + "error message 2", + "error message 3", + "error message 4", + ], + "info": ["log message 1", "log message 2", "log message 3"], + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="with-no-matches-session", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + console_error_count=4, + console_log_count=3, + log_messages={ + "info": ["log message 1", "log message 2", "log message 3"], + }, + ) + + # Perform the filtering and validate results + session_recordings, _, _ = self._filter_recordings_by( + {"console_log_filters": console_log_filters, "operand": operand} + ) + + assert sorted([sr["session_id"] for sr in session_recordings]) == sorted(expected_session_ids) + + @snapshot_clickhouse_queries + def test_filter_for_recordings_by_snapshot_source(self): + user = "test_duration_filter-user" + Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) + + session_id_one = "session one id" + produce_replay_summary( + distinct_id=user, + 
session_id=session_id_one, + team_id=self.team.id, + snapshot_source="web", + ) + + session_id_two = "session two id" + produce_replay_summary( + distinct_id=user, + session_id=session_id_two, + team_id=self.team.id, + snapshot_source="mobile", + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "having_predicates": '[{"key": "snapshot_source", "value": ["web"], "operator": "exact", "type": "recording"}]' + } + ) + assert [r["session_id"] for r in session_recordings] == [session_id_one] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "having_predicates": '[{"key": "snapshot_source", "value": ["mobile"], "operator": "exact", "type": "recording"}]' + } + ) + assert [r["session_id"] for r in session_recordings] == [session_id_two] + + @also_test_with_materialized_columns( + event_properties=["is_internal_user"], + person_properties=["email"], + verify_no_jsonextract=False, + ) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_event_filter_with_test_accounts_excluded(self): + self.team.test_account_filters = [ + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + }, + { + "key": "is_internal_user", + "value": ["false"], + "operator": "exact", + "type": "event", + }, + {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": "true", + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 0) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 1) + + @also_test_with_materialized_columns( + event_properties=["$browser"], + person_properties=["email"], + verify_no_jsonextract=False, + ) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self): + self.team.test_account_filters = [ + {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={"$session_id": "1", "$window_id": "1", "$browser": "Chrome"}, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + 
session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={"$session_id": "2", "$window_id": "1", "$browser": "Firefox"}, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + self.team.test_account_filters = [ + {"key": "person.properties.email == 'bla'", "type": "hogql"}, + ] + self.team.save() + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + self.team.test_account_filters = [ + {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, + {"key": "person.properties.email == 'bla'", "type": "hogql"}, + ] + self.team.save() + + # one user sessions matches the person + event test_account filter + (session_recordings, _, _) = self._filter_recordings_by( + { + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + # TRICKY: we had to disable use of materialized columns for part of the query generation + # due to RAM usage issues on the EU cluster + @also_test_with_materialized_columns(event_properties=["is_internal_user"], verify_no_jsonextract=False) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_event_property_test_account_filter(self): + """ + This is a regression test. A user with an $ip test account filter + reported the filtering wasn't working. 
+ + The filter wasn't triggering the "should join events check", and so we didn't apply the filter at all + """ + self.team.test_account_filters = [ + { + "key": "is_internal_user", + "value": ["false"], + "operator": "exact", + "type": "event", + }, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + # TRICKY: we had to disable use of materialized columns for part of the query generation + # due to RAM usage issues on the EU cluster + @also_test_with_materialized_columns(event_properties=["is_internal_user"], verify_no_jsonextract=True) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_event_property_test_account_filter_allowing_denormalized_props(self): + """ + This is a duplicate of the test test_top_level_event_property_test_account_filter + but with denormalized props allowed + """ + + with self.settings(ALLOW_DENORMALIZED_PROPS_IN_LISTING=True): + self.team.test_account_filters = [ + { + "key": "is_internal_user", + "value": ["false"], + "operator": "exact", + "type": "event", + }, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # 
pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + @also_test_with_materialized_columns(event_properties=["is_internal_user"]) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_hogql_event_property_test_account_filter(self): + """ + This is a regression test. A user with an $ip test account filter + reported the filtering wasn't working. + + The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all + """ + self.team.test_account_filters = [ + {"key": "properties.is_internal_user == 'true'", "type": "hogql"}, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + @also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_hogql_person_property_test_account_filter(self): + """ + This is a regression test. A user with an $ip test account filter + reported the filtering wasn't working. 
+ + The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all + """ + self.team.test_account_filters = [ + {"key": "person.properties.email == 'bla'", "type": "hogql"}, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + @also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_person_property_test_account_filter(self): + """ + This is a regression test. A user with an $ip test account filter + reported the filtering wasn't working. 
+ + The filter wasn't triggering the "should join events" check, and so we didn't apply the filter at all + """ + self.team.test_account_filters = [{"key": "email", "value": ["bla"], "operator": "exact", "type": "person"}] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user", + self.an_hour_ago, + properties={ + "event": "something that won't match", + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, + ) + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + ) + + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ) + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + self.assertEqual(len(session_recordings), 1) + + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_event_filter_with_two_events_and_multiple_teams(self): + another_team = Team.objects.create(organization=self.organization) + + # two teams, user with the same properties + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create(team=another_team, distinct_ids=["user"], properties={"email": "bla"}) + + # a recording session with a pageview and a pageleave + self._a_session_with_two_events(self.team, "1") + self._a_session_with_two_events(another_team, "2") + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "$pageleave", + "type": "events", + "order": 0, + "name": "$pageleave", + }, + ], + } + ) + + self.assertEqual([sr["session_id"] for sr in session_recordings], ["1"]) + + def _a_session_with_two_events(self, team: Team, session_id: str) -> None: + produce_replay_summary( + distinct_id="user", + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=team.pk, + ) + self.create_event( + "user", + self.an_hour_ago, + team=team, + event_name="$pageview", + properties={"$session_id": session_id, "$window_id": "1"}, + ) + self.create_event( + "user", + self.an_hour_ago, + team=team, + event_name="$pageleave", + properties={"$session_id": session_id, "$window_id": "1"}, + ) + + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_event_filter_with_group_filter(self): + Person.objects.create(team=self.team, 
distinct_ids=["user"], properties={"email": "bla"}) + session_id = f"test_event_filter_with_group_filter-ONE-{uuid4()}" + different_group_session = f"test_event_filter_with_group_filter-TWO-{uuid4()}" + + produce_replay_summary( + distinct_id="user", + session_id=session_id, + first_timestamp=self.an_hour_ago, + team_id=self.team.pk, + ) + produce_replay_summary( + distinct_id="user", + session_id=different_group_session, + first_timestamp=self.an_hour_ago, + team_id=self.team.pk, + ) + + GroupTypeMapping.objects.create( + team=self.team, project_id=self.team.project_id, group_type="project", group_type_index=0 + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="project:1", + properties={"name": "project one"}, + ) + + GroupTypeMapping.objects.create( + team=self.team, project_id=self.team.project_id, group_type="organization", group_type_index=1 + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="org:1", + properties={"name": "org one"}, + ) + + self.create_event( + "user", + self.an_hour_ago, + team=self.team, + event_name="$pageview", + properties={ + "$session_id": session_id, + "$window_id": "1", + "$group_1": "org:1", + }, + ) + self.create_event( + "user", + self.an_hour_ago, + team=self.team, + event_name="$pageview", + properties={ + "$session_id": different_group_session, + "$window_id": "1", + "$group_0": "project:1", + }, + ) + + (session_recordings, _, _) = self._filter_recordings_by( + { + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + "properties": [ + { + "key": "name", + "value": ["org one"], + "operator": "exact", + "type": "group", + "group_type_index": 1, + } + ], + } + ], + } + ) + + assert [sr["session_id"] for sr in session_recordings] == [session_id] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "name", + "value": ["org one"], + "operator": "exact", + "type": "group", + "group_type_index": 1, + } + ], + } + ) + assert [sr["session_id"] for sr in session_recordings] == [session_id] + + (session_recordings, _, _) = self._filter_recordings_by( + { + "properties": [ + { + "key": "name", + "value": ["org one"], + "operator": "exact", + "type": "group", + "group_type_index": 2, + } + ], + } + ) + assert session_recordings == [] + + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_ordering(self): + session_id_one = f"test_ordering-one" + session_id_two = f"test_ordering-two" + session_id_three = f"test_ordering-three" + + produce_replay_summary( + session_id=session_id_one, + team_id=self.team.id, + mouse_activity_count=50, + first_timestamp=(self.an_hour_ago + relativedelta(seconds=60)), + ) + produce_replay_summary( + session_id=session_id_two, + team_id=self.team.id, + mouse_activity_count=100, + first_timestamp=self.an_hour_ago, + ) + produce_replay_summary( + session_id=session_id_three, + team_id=self.team.id, + mouse_activity_count=10, + first_timestamp=(self.an_hour_ago + relativedelta(minutes=10)), + ) + + (session_recordings, _, _) = self._filter_recordings_by({"order": "start_time"}) + assert [r["session_id"] for r in session_recordings] == [session_id_three, session_id_one, session_id_two] + + (session_recordings, _, _) = self._filter_recordings_by({"order": "mouse_activity_count"}) + assert [r["session_id"] for r in session_recordings] == [session_id_two, session_id_one, session_id_three] + + @also_test_with_materialized_columns(event_properties=["$host"], 
verify_no_jsonextract=False) + @freeze_time("2021-01-21T20:00:00.000Z") + @snapshot_clickhouse_queries + def test_top_level_event_host_property_test_account_filter(self): + """ + This is a regression test. See: https://posthoghelp.zendesk.com/agent/tickets/18059 + """ + self.team.test_account_filters = [ + {"key": "$host", "type": "event", "value": "^(localhost|127\\.0\\.0\\.1)($|:)", "operator": "not_regex"}, + ] + self.team.save() + + Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + ensure_analytics_event_in_session=False, + ) + # the session needs to have multiple matching or not matching events + for _ in range(10): + self.create_event( + "user", + self.an_hour_ago, + properties={ + "$session_id": "1", + "$window_id": "1", + "$host": "localhost", + }, + ) + + produce_replay_summary( + distinct_id="user", + session_id="1", + first_timestamp=self.an_hour_ago + relativedelta(seconds=30), + team_id=self.team.id, + click_count=10, + ensure_analytics_event_in_session=False, + ) + + for _ in range(10): + self.create_event( + "user2", + self.an_hour_ago, + properties={ + "$session_id": "2", + "$window_id": "1", + "$host": "example.com", + }, + ) + produce_replay_summary( + distinct_id="user2", + session_id="2", + first_timestamp=self.an_hour_ago, + team_id=self.team.id, + click_count=10, + ensure_analytics_event_in_session=False, + ) + + # there are 2 pageviews + (session_recordings, _, _) = self._filter_recordings_by( + { + # pageview that matches the hogql test_accounts filter + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "filter_test_accounts": False, + } + ) + self.assertEqual(len(session_recordings), 2) + + (session_recordings, _, _) = self._filter_recordings_by( + { + # only 1 pageview that matches the test_accounts filter + "filter_test_accounts": True, + } + ) + assert session_recordings == [ + { + "active_seconds": 0.0, + "activity_score": 0.28, + "click_count": 10, # in the bug this value was 10 X number of events in the session + "console_error_count": 0, + "console_log_count": 0, + "console_warn_count": 0, + "distinct_id": "user2", + "duration": 3600, + "end_time": ANY, + "first_url": "https://not-provided-by-test.com", + "inactive_seconds": 3600.0, + "keypress_count": 0, + "mouse_activity_count": 0, + "session_id": "2", + "start_time": ANY, + "team_id": self.team.id, + "ongoing": 1, + } + ] diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index 3961109fc365f..0b9ea6e40d48c 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -4,6 +4,7 @@ from collections.abc import Generator from contextlib import contextmanager from datetime import UTC, datetime, timedelta +from json import JSONDecodeError from typing import Any, Optional, cast import posthoganalytics @@ -20,8 +21,6 @@ from rest_framework.response import Response from rest_framework.utils.encoders import JSONEncoder -from ee.session_recordings.ai.error_clustering import error_clustering -from ee.session_recordings.ai.similar_recordings import similar_recordings from ee.session_recordings.session_summary.summarize_session import 
summarize_recording from posthog.api.person import MinimalPersonSerializer from posthog.api.routing import TeamAndOrgViewSetMixin @@ -38,7 +37,7 @@ ClickHouseSustainedRateThrottle, PersonalApiKeyRateThrottle, ) -from posthog.schema import HogQLQueryModifiers, QueryTiming +from posthog.schema import HogQLQueryModifiers, QueryTiming, RecordingsQuery from posthog.session_recordings.models.session_recording import SessionRecording from posthog.session_recordings.models.session_recording_event import ( SessionRecordingViewed, @@ -47,6 +46,7 @@ ReplayFiltersEventsSubQuery, SessionRecordingListFromFilters, ) +from posthog.session_recordings.queries.session_recording_list_from_query import SessionRecordingListFromQuery from posthog.session_recordings.queries.session_recording_properties import ( SessionRecordingProperties, ) @@ -242,10 +242,8 @@ def validate(self, data): return data -def list_recordings_response( - filter: SessionRecordingsFilter, request: request.Request, serializer_context: dict[str, Any] -) -> Response: - (recordings, timings) = list_recordings(filter, request, context=serializer_context) +def list_recordings_response(listing_result: tuple[dict, dict]) -> Response: + (recordings, timings) = listing_result response = Response(recordings) response.headers["Server-Timing"] = ", ".join( f"{key};dur={round(duration, ndigits=2)}" for key, duration in timings.items() @@ -296,6 +294,22 @@ class SnapshotsSustainedRateThrottle(PersonalApiKeyRateThrottle): rate = "600/hour" +def query_as_params_to_dict(params_dict: dict) -> dict: + """ + before (if ever) we convert this to a query runner that takes a post + we need to convert to a valid dict from the data that arrived in query params + """ + converted = {} + for key in params_dict: + try: + converted[key] = json.loads(params_dict[key]) if isinstance(params_dict[key], str) else params_dict[key] + except JSONDecodeError: + converted[key] = params_dict[key] + + converted.pop("as_query", None) + return converted + + # NOTE: Could we put the sharing stuff in the shared mixin :thinking: class SessionRecordingViewSet(TeamAndOrgViewSetMixin, viewsets.GenericViewSet, UpdateModelMixin): scope_object = "session_recording" @@ -322,9 +336,19 @@ def safely_get_object(self, queryset) -> SessionRecording: return recording def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: - filter = SessionRecordingsFilter(request=request, team=self.team) - self._maybe_report_recording_list_filters_changed(request, team=self.team) - return list_recordings_response(filter, request, self.get_serializer_context()) + use_query_type = (request.GET.get("as_query", "False")).lower() == "true" + if use_query_type: + data_dict = query_as_params_to_dict(request.GET.dict()) + query = RecordingsQuery.model_validate(data_dict) + # a little duplication for now + self._maybe_report_recording_list_filters_changed(request, team=self.team) + return list_recordings_response( + list_recordings_from_query(query, request, context=self.get_serializer_context()) + ) + else: + filter = SessionRecordingsFilter(request=request, team=self.team) + self._maybe_report_recording_list_filters_changed(request, team=self.team) + return list_recordings_response(list_recordings(filter, request, context=self.get_serializer_context())) @extend_schema( exclude=True, @@ -685,65 +709,6 @@ def summarize(self, request: request.Request, **kwargs): ) return r - @extend_schema(exclude=True) - @action(methods=["GET"], detail=True) - def similar_sessions(self, request: request.Request, 
**kwargs): - if not request.user.is_authenticated: - raise exceptions.NotAuthenticated() - - cache_key = f'similar_sessions_{self.team.pk}_{self.kwargs["pk"]}' - # Check if the response is cached - cached_response = cache.get(cache_key) - if cached_response: - return Response(cached_response) - - user = cast(User, request.user) - - if not posthoganalytics.feature_enabled("session-replay-similar-recordings", str(user.distinct_id)): - raise exceptions.ValidationError("similar recordings is not enabled for this user") - - recording = self.get_object() - - if not SessionReplayEvents().exists(session_id=str(recording.session_id), team=self.team): - raise exceptions.NotFound("Recording not found") - - recordings = similar_recordings(recording, self.team) - if recordings: - cache.set(cache_key, recordings, timeout=30) - - # let the browser cache for half the time we cache on the server - r = Response(recordings, headers={"Cache-Control": "max-age=15"}) - return r - - @extend_schema(exclude=True) - @action(methods=["GET"], detail=False) - def error_clusters(self, request: request.Request, **kwargs): - if not request.user.is_authenticated: - raise exceptions.NotAuthenticated() - - refresh_clusters = request.GET.get("refresh") - - cache_key = f"cluster_errors_{self.team.pk}" - # Check if the response is cached - cached_response = cache.get(cache_key) - if cached_response and not refresh_clusters: - return Response(cached_response) - - user = cast(User, request.user) - - if not posthoganalytics.feature_enabled("session-replay-error-clustering", str(user.distinct_id)): - raise exceptions.ValidationError("clustered errors is not enabled for this user") - - # Clustering will eventually be done during a scheduled background task - clusters = error_clustering(self.team) - - if clusters: - cache.set(cache_key, clusters, settings.CACHED_RESULTS_TTL) - - # let the browser cache for half the time we cache on the server - r = Response(clusters, headers={"Cache-Control": "max-age=15"}) - return r - def _stream_blob_to_client( self, recording: SessionRecording, request: request.Request, event_properties: dict ) -> HttpResponse: @@ -855,6 +820,107 @@ def _send_realtime_snapshots_to_client( raise exceptions.ValidationError(f"Invalid version: {version}") +# TODO i guess this becomes the query runner for our _internal_ use of RecordingsQuery +def list_recordings_from_query( + query: RecordingsQuery, request: request.Request, context: dict[str, Any] +) -> tuple[dict, dict]: + """ + As we can store recordings in S3 or in Clickhouse we need to do a few things here + + A. If filter.session_ids is specified: + 1. We first try to load them directly from Postgres if they have been persisted to S3 (they might have fell out of CH) + 2. Any that couldn't be found are then loaded from Clickhouse + B. Otherwise we just load all values from Clickhouse + 2. 
Once loaded we convert them to SessionRecording objects in case we have any other persisted data + """ + + all_session_ids = query.session_ids + + recordings: list[SessionRecording] = [] + more_recordings_available = False + team = context["get_team"]() + hogql_timings: list[QueryTiming] | None = None + + timer = ServerTimingsGathered() + + if all_session_ids: + with timer("load_persisted_recordings"): + # If we specify the session ids (like from pinned recordings) we can optimise by only going to Postgres + sorted_session_ids = sorted(all_session_ids) + + persisted_recordings_queryset = SessionRecording.objects.filter( + team=team, session_id__in=sorted_session_ids + ).exclude(object_storage_path=None) + + persisted_recordings = persisted_recordings_queryset.all() + + recordings = recordings + list(persisted_recordings) + + remaining_session_ids = list(set(all_session_ids) - {x.session_id for x in persisted_recordings}) + query.session_ids = remaining_session_ids + + if (all_session_ids and query.session_ids) or not all_session_ids: + distinct_id = str(cast(User, request.user).distinct_id) + modifiers = safely_read_modifiers_overrides(distinct_id, team) + + with timer("load_recordings_from_hogql"): + (ch_session_recordings, more_recordings_available, hogql_timings) = SessionRecordingListFromQuery( + query=query, team=team, hogql_query_modifiers=modifiers + ).run() + + with timer("build_recordings"): + recordings_from_clickhouse = SessionRecording.get_or_build_from_clickhouse(team, ch_session_recordings) + recordings = recordings + recordings_from_clickhouse + + recordings = [x for x in recordings if not x.deleted] + + # If we have specified session_ids we need to sort them by the order they were specified + if all_session_ids: + recordings = sorted( + recordings, + key=lambda x: cast(list[str], all_session_ids).index(x.session_id), + ) + + if not request.user.is_authenticated: # for mypy + raise exceptions.NotAuthenticated() + + # Update the viewed status for all loaded recordings + with timer("load_viewed_recordings"): + viewed_session_recordings = set( + SessionRecordingViewed.objects.filter(team=team, user=request.user).values_list("session_id", flat=True) + ) + + with timer("load_persons"): + # Get the related persons for all the recordings + distinct_ids = sorted([x.distinct_id for x in recordings]) + person_distinct_ids = PersonDistinctId.objects.filter(distinct_id__in=distinct_ids, team=team).select_related( + "person" + ) + + with timer("process_persons"): + distinct_id_to_person = {} + for person_distinct_id in person_distinct_ids: + person_distinct_id.person._distinct_ids = [ + person_distinct_id.distinct_id + ] # Stop the person from loading all distinct ids + distinct_id_to_person[person_distinct_id.distinct_id] = person_distinct_id.person + + for recording in recordings: + recording.viewed = recording.session_id in viewed_session_recordings + person = distinct_id_to_person.get(recording.distinct_id) + if person: + recording.person = person + + session_recording_serializer = SessionRecordingSerializer(recordings, context=context, many=True) + results = session_recording_serializer.data + + all_timings = _generate_timings(hogql_timings, timer) + return ( + {"results": results, "has_next": more_recordings_available, "version": 4}, + all_timings, + ) + + def list_recordings( filter: SessionRecordingsFilter, request: request.Request, context: dict[str, Any] ) -> tuple[dict, dict]: @@ -926,7 +992,7 @@ def list_recordings( with timer("load_persons"): # Get the related persons for 
all the recordings - distinct_ids = sorted([x.distinct_id for x in recordings]) + distinct_ids = sorted([x.distinct_id for x in recordings if x.distinct_id]) person_distinct_ids = PersonDistinctId.objects.filter(distinct_id__in=distinct_ids, team=team).select_related( "person" ) @@ -941,7 +1007,7 @@ def list_recordings( for recording in recordings: recording.viewed = recording.session_id in viewed_session_recordings - person = distinct_id_to_person.get(recording.distinct_id) + person = distinct_id_to_person.get(recording.distinct_id) if recording.distinct_id else None if person: recording.person = person diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr index ccba484c51a23..88a534a569646 100644 --- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr +++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr @@ -640,12 +640,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '421' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '421' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -847,7 +847,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -1043,7 +1044,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -1688,12 +1690,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -1895,7 +1897,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", 
"posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -2042,7 +2045,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -2441,12 +2445,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -2648,7 +2652,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -2701,7 +2706,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -2793,7 +2799,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -3129,12 +3136,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND 
"ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -3336,7 +3343,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -3532,7 +3540,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -3881,12 +3890,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -4088,7 +4097,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -4233,7 +4243,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -4597,12 +4608,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -4804,7 +4815,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", 
"posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -4993,7 +5005,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -5395,12 +5408,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -5602,7 +5615,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -5659,12 +5673,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -5774,7 +5788,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -6034,7 +6049,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE 
("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -6091,12 +6107,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -6206,7 +6222,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -6556,12 +6573,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -6763,7 +6780,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -6915,7 +6933,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -7248,12 +7267,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = 
'428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -7455,7 +7474,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -7602,7 +7622,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -7997,12 +8018,12 @@ LEFT OUTER JOIN "posthog_organizationmembership" ON ("ee_accesscontrol"."organization_member_id" = "posthog_organizationmembership"."id") WHERE (("ee_accesscontrol"."organization_member_id" IS NULL AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("posthog_organizationmembership"."user_id" = 99999 AND "ee_accesscontrol"."resource" = 'project' - AND "ee_accesscontrol"."resource_id" = '428' + AND "ee_accesscontrol"."resource_id" = '99999' AND "ee_accesscontrol"."role_id" IS NULL AND "ee_accesscontrol"."team_id" = 99999) OR ("ee_accesscontrol"."organization_member_id" IS NULL @@ -8204,7 +8225,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -8257,7 +8279,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" @@ -8349,7 +8372,8 @@ "posthog_datawarehousejoin"."source_table_key", "posthog_datawarehousejoin"."joining_table_name", "posthog_datawarehousejoin"."joining_table_key", - "posthog_datawarehousejoin"."field_name" + "posthog_datawarehousejoin"."field_name", + "posthog_datawarehousejoin"."configuration" FROM "posthog_datawarehousejoin" WHERE ("posthog_datawarehousejoin"."team_id" = 99999 AND NOT ("posthog_datawarehousejoin"."deleted" diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index e1be77900953d..f3c7d7edfa38d 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -19,6 +19,7 @@ from posthog.models.filters.session_recordings_filter import 
SessionRecordingsFilter from posthog.models.property import Property from posthog.models.team import Team +from posthog.schema import RecordingsQuery, LogEntryPropertyFilter from posthog.session_recordings.models.session_recording_event import ( SessionRecordingViewed, ) @@ -171,18 +172,24 @@ def test_can_list_recordings_even_when_the_person_has_multiple_distinct_ids(self @patch("posthoganalytics.capture") @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromFilters") - def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_lister, mock_capture): + @patch("posthog.session_recordings.session_recording_api.list_recordings_from_query") + def test_console_log_filters_are_correctly_passed_to_listing_when_filters_are_used( + self, mock_query_lister, mock_summary_lister, mock_capture + ): mock_summary_lister.return_value.run.return_value = ([], False) + mock_query_lister.return_value.run.return_value = ([], False) params_string = urlencode( { - "console_log_filters": '[{"key": "console_log_level", "value": ["warn", "error"], "operator": "exact", "type": "recording"}]', + "console_log_filters": '[{"key": "console_log_level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}]', "user_modified_filters": '{"my_filter": "something"}', + "as_query": False, } ) self.client.get(f"/api/projects/{self.team.id}/session_recordings?{params_string}") assert len(mock_summary_lister.call_args_list) == 1 + assert len(mock_query_lister.call_args_list) == 0 filter_passed_to_mock: SessionRecordingsFilter = mock_summary_lister.call_args_list[0].kwargs["filter"] console_filter = cast(Property, filter_passed_to_mock.console_log_filters.values[0]) assert console_filter.value == ["warn", "error"] @@ -197,6 +204,44 @@ def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_ groups=ANY, ) + @patch("posthoganalytics.capture") + @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromFilters") + @patch("posthog.session_recordings.session_recording_api.list_recordings_from_query") + def test_console_log_filters_are_correctly_passed_to_listing_when_query_is_used( + self, mock_query_lister, mock_summary_lister, mock_capture + ): + mock_summary_lister.return_value.run.return_value = ([], False) + mock_query_lister.return_value = ([], False) + + params_string = urlencode( + { + "console_log_filters": '[{"key": "console_log_level", "value": ["warn", "error"], "operator": "exact", "type": "log_entry"}]', + "user_modified_filters": '{"my_filter": "something"}', + "as_query": True, + } + ) + self.client.get(f"/api/projects/{self.team.id}/session_recordings?{params_string}") + + assert len(mock_summary_lister.call_args_list) == 0 + assert len(mock_query_lister.call_args_list) == 1 + query_passed_to_mock: RecordingsQuery = mock_query_lister.call_args_list[0][0][0] + maybe_the_filter = ( + query_passed_to_mock.console_log_filters[0] if query_passed_to_mock.console_log_filters else None + ) + assert maybe_the_filter is not None + console_filter = cast(LogEntryPropertyFilter, maybe_the_filter) + assert console_filter.value == ["warn", "error"] + assert mock_capture.call_args_list[0] == call( + self.user.distinct_id, + "recording list filters changed", + properties={ + "$current_url": ANY, + "$session_id": ANY, + "partial_filter_chosen_my_filter": "something", + }, + groups=ANY, + ) + @snapshot_postgres_queries def test_listing_recordings_is_not_nplus1_for_persons(self): with freeze_time("2022-06-03T12:00:00.000Z"): 
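The as_query=True branch exercised in the test above relies on query_as_params_to_dict, added to session_recording_api.py earlier in this diff: each query-string value is JSON-decoded when possible, values that are not valid JSON pass through unchanged, the as_query flag is dropped, and the resulting dict is handed to RecordingsQuery.model_validate. A minimal sketch of that round trip, using an illustrative filter value rather than one copied from the tests:

    from posthog.schema import RecordingsQuery
    from posthog.session_recordings.session_recording_api import query_as_params_to_dict

    params = {
        "console_log_filters": '[{"key": "console_log_level", "value": ["warn"], "operator": "exact", "type": "log_entry"}]',
        "as_query": "true",
    }
    converted = query_as_params_to_dict(params)
    # the JSON string became a list of dicts, and "as_query" was popped off
    query = RecordingsQuery.model_validate(converted)
    assert query.console_log_filters is not None
    assert query.console_log_filters[0].value == ["warn"]
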
diff --git a/posthog/settings/session_replay.py b/posthog/settings/session_replay.py index 08ddbce6dcb65..9bb53a501e905 100644 --- a/posthog/settings/session_replay.py +++ b/posthog/settings/session_replay.py @@ -16,17 +16,6 @@ "REALTIME_SNAPSHOTS_FROM_REDIS_ATTEMPT_TIMEOUT_SECONDS", 0.2, type_cast=float ) -REPLAY_EMBEDDINGS_ALLOWED_TEAMS: list[str] = get_list(get_from_env("REPLAY_EMBEDDINGS_ALLOWED_TEAM", "", type_cast=str)) -REPLAY_EMBEDDINGS_BATCH_SIZE = get_from_env("REPLAY_EMBEDDINGS_BATCH_SIZE", 10, type_cast=int) -REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS = get_from_env("REPLAY_EMBEDDINGS_MIN_DURATION_SECONDS", 30, type_cast=int) -REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS = get_from_env( - "REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS", 150, type_cast=int -) -REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_EPS = get_from_env("REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_EPS", 0.2, type_cast=float) -REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_MIN_SAMPLES = get_from_env( - "REPLAY_EMBEDDINGS_CLUSTERING_DBSCAN_MIN_SAMPLES", 10, type_cast=int -) - REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE = get_from_env("REPLAY_MESSAGE_TOO_LARGE_SAMPLE_RATE", 0, type_cast=float) REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET = get_from_env( "REPLAY_MESSAGE_TOO_LARGE_SAMPLE_BUCKET", "posthog-cloud-prod-us-east-1-k8s-replay-samples" diff --git a/posthog/settings/temporal.py b/posthog/settings/temporal.py index 34450437c6dcd..33daed600cebf 100644 --- a/posthog/settings/temporal.py +++ b/posthog/settings/temporal.py @@ -16,9 +16,15 @@ MAX_CONCURRENT_ACTIVITIES: int | None = get_from_env("MAX_CONCURRENT_ACTIVITIES", None, optional=True, type_cast=int) BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB +BATCH_EXPORT_S3_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env( + "BATCH_EXPORT_S3_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 0, type_cast=int +) BATCH_EXPORT_SNOWFLAKE_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 100 # 100MB BATCH_EXPORT_POSTGRES_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 100 # 100MB +BATCH_EXPORT_BIGQUERY_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES: int = get_from_env( + "BATCH_EXPORT_BIGQUERY_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES", 0, type_cast=int +) BATCH_EXPORT_HTTP_UPLOAD_CHUNK_SIZE_BYTES: int = 1024 * 1024 * 50 # 50MB BATCH_EXPORT_HTTP_BATCH_SIZE: int = 5000 BATCH_EXPORT_BUFFER_QUEUE_MAX_SIZE_BYTES: int = 1024 * 1024 * 300 # 300MB diff --git a/posthog/tasks/alerts/checks.py b/posthog/tasks/alerts/checks.py index 83d1fc7bbd1cf..4738ca45d3ade 100644 --- a/posthog/tasks/alerts/checks.py +++ b/posthog/tasks/alerts/checks.py @@ -2,7 +2,8 @@ import traceback from datetime import datetime, timedelta, UTC -from typing import Any, cast +from typing import cast +from collections.abc import Callable from dateutil.relativedelta import relativedelta from celery import shared_task @@ -36,8 +37,7 @@ alert_calculation_interval_to_relativedelta, ) from posthog.tasks.alerts.trends import check_trends_alert -from posthog.ph_client import get_ph_client -from posthoganalytics import Posthog +from posthog.ph_client import ph_us_client logger = structlog.get_logger(__name__) @@ -77,13 +77,6 @@ def __init__(self, err: Exception): ANIRUDH_DISTINCT_ID = "wcPbDRs08GtNzrNIXfzHvYAkwUaekW7UrAo4y3coznT" -def _capture_ph_event(ph_client: Posthog | None, *args: Any, **kwargs: Any) -> None: - if ph_client: - ph_client.capture(*args, **kwargs) - - return None - - @shared_task(ignore_result=True) def checks_cleanup_task() -> None: 
AlertCheck.clean_up_old_checks() @@ -99,7 +92,6 @@ def alerts_backlog_task() -> None: - hourly alerts - alerts that haven't been checked in the last hour + 5min - daily alerts - alerts that haven't been checked in the last hour + 15min """ - ph_client = get_ph_client() now = datetime.now(UTC) hourly_alerts_breaching_sla = AlertConfiguration.objects.filter( @@ -112,16 +104,6 @@ def alerts_backlog_task() -> None: HOURLY_ALERTS_BACKLOG_GAUGE.set(hourly_alerts_breaching_sla) - _capture_ph_event( - ph_client, - ANIRUDH_DISTINCT_ID, - "alert check backlog", - properties={ - "alert_check_frequency": AlertCalculationInterval.HOURLY, - "backlog": hourly_alerts_breaching_sla, - }, - ) - now = datetime.now(UTC) daily_alerts_breaching_sla = AlertConfiguration.objects.filter( @@ -134,20 +116,27 @@ def alerts_backlog_task() -> None: DAILY_ALERTS_BACKLOG_GAUGE.set(daily_alerts_breaching_sla) - _capture_ph_event( - ph_client, - ANIRUDH_DISTINCT_ID, - "alert check backlog", - properties={ - "alert_check_frequency": AlertCalculationInterval.DAILY, - "backlog": daily_alerts_breaching_sla, - }, - ) + with ph_us_client() as capture_ph_event: + capture_ph_event( + ANIRUDH_DISTINCT_ID, + "alert check backlog", + properties={ + "calculation_interval": AlertCalculationInterval.DAILY, + "backlog": daily_alerts_breaching_sla, + }, + ) + + capture_ph_event( + ANIRUDH_DISTINCT_ID, + "alert check backlog", + properties={ + "calculation_interval": AlertCalculationInterval.HOURLY, + "backlog": hourly_alerts_breaching_sla, + }, + ) # sleeping 30s for prometheus to pick up the metrics sent during task time.sleep(30) - if ph_client: - ph_client.shutdown() @shared_task( @@ -228,12 +217,11 @@ def check_alerts_task() -> None: ) # @limit_concurrency(5) Concurrency controlled by CeleryQueue.ALERTS for now def check_alert_task(alert_id: str) -> None: - check_alert(alert_id) - + with ph_us_client() as capture_ph_event: + check_alert(alert_id, capture_ph_event) -def check_alert(alert_id: str) -> None: - ph_client = get_ph_client() +def check_alert(alert_id: str, capture_ph_event: Callable = lambda *args, **kwargs: None) -> None: try: alert = AlertConfiguration.objects.get(id=alert_id, enabled=True) except AlertConfiguration.DoesNotExist: @@ -276,14 +264,13 @@ def check_alert(alert_id: str) -> None: alert.save() try: - check_alert_and_notify_atomically(alert, ph_client) + check_alert_and_notify_atomically(alert, capture_ph_event) except Exception as err: ALERT_CHECK_ERROR_COUNTER.inc() user = cast(User, alert.created_by) - _capture_ph_event( - ph_client, - cast(str, user.distinct_id), + capture_ph_event( + user.distinct_id, "alert check failed", properties={ "alert_id": alert.id, @@ -312,12 +299,9 @@ def check_alert(alert_id: str) -> None: alert.is_calculating = False alert.save() - if ph_client: - ph_client.shutdown() - @transaction.atomic -def check_alert_and_notify_atomically(alert: AlertConfiguration, ph_client: Posthog | None) -> None: +def check_alert_and_notify_atomically(alert: AlertConfiguration, capture_ph_event: Callable) -> None: """ Computes insight results, checks alert for breaches and notifies user. Only commits updates to alert state if all of the above complete successfully. 
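The `ph_us_client()` context manager used above comes from `posthog.ph_client`; its implementation is not part of this diff. As a rough sketch of the pattern it appears to replace (`get_ph_client()` plus explicit `shutdown()` and None checks), something along these lines would behave equivalently. The name and internals here are assumptions, not the real implementation:

```python
import contextlib
from collections.abc import Callable, Iterator

@contextlib.contextmanager
def ph_us_client_sketch() -> Iterator[Callable]:
    """Hypothetical sketch: yield a capture callable that is a no-op when no
    analytics client is configured, and always shut the client down on exit."""
    client = None  # the real code would construct an analytics client here

    def capture(*args, **kwargs) -> None:
        if client is not None:
            client.capture(*args, **kwargs)

    try:
        yield capture
    finally:
        if client is not None:
            client.shutdown()  # flush any queued events

# Callers no longer need to guard against a missing client:
with ph_us_client_sketch() as capture_ph_event:
    capture_ph_event("some-distinct-id", "alert check backlog", properties={"backlog": 0})
```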
@@ -331,12 +315,12 @@ def check_alert_and_notify_atomically(alert: AlertConfiguration, ph_client: Post user = cast(User, alert.created_by) # Event to count alert checks - _capture_ph_event( - ph_client, - cast(str, user.distinct_id), + capture_ph_event( + user.distinct_id, "alert check", properties={ "alert_id": alert.id, + "calculation_interval": alert.calculation_interval, }, ) @@ -354,9 +338,8 @@ def check_alert_and_notify_atomically(alert: AlertConfiguration, ph_client: Post except Exception as err: error_message = f"Alert id = {alert.id}, failed to evaluate" - _capture_ph_event( - ph_client, - cast(str, user.distinct_id), + capture_ph_event( + user.distinct_id, "alert check failed", properties={ "alert_id": alert.id, @@ -385,25 +368,12 @@ def check_alert_and_notify_atomically(alert: AlertConfiguration, ph_client: Post logger.info("Check state is %s", alert_check.state, alert_id=alert.id) case AlertState.ERRORED: logger.info("Sending alert error notifications", alert_id=alert.id, error=alert_check.error) - # TODO: uncomment this after checking errors sent send_notifications_for_errors(alert, alert_check.error) case AlertState.FIRING: assert breaches is not None send_notifications_for_breaches(alert, breaches) except Exception as err: error_message = f"AlertCheckError: error sending notifications for alert_id = {alert.id}" - - _capture_ph_event( - ph_client, - cast(str, user.distinct_id), - "alert check failed", - properties={ - "alert_id": alert.id, - "error": error_message, - "traceback": traceback.format_exc(), - }, - ) - logger.exception(error_message, exc_info=err) capture_exception(Exception(error_message)) diff --git a/posthog/tasks/exports/ordered_csv_renderer.py b/posthog/tasks/exports/ordered_csv_renderer.py index 5b70e9bed911c..4e5ed7eddc78d 100644 --- a/posthog/tasks/exports/ordered_csv_renderer.py +++ b/posthog/tasks/exports/ordered_csv_renderer.py @@ -18,7 +18,7 @@ def tablize(self, data: Any, header: Any = None, labels: Any = None) -> Generato header = data.header if not data: - return [] + return # First, flatten the data (i.e., convert it to a list of # dictionaries that are each exactly one level deep). 
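The `tablize` change from `return []` to a bare `return` matters because `tablize` is a generator: a `return <value>` inside a generator does not hand that value to the caller, it only ends iteration (the value is tucked away on `StopIteration.value`). A self-contained sketch, not the renderer's actual flattening logic, showing the behaviour:

```python
from collections.abc import Generator

def tablize_sketch(data: list[dict]) -> Generator[list, None, None]:
    if not data:
        # A bare `return` simply stops iteration; `return []` would not give the
        # caller an empty list, it would only set StopIteration.value, which
        # plain `for` loops and list() ignore.
        return
    header = sorted({key for row in data for key in row})
    yield header
    for row in data:
        yield [row.get(col, "") for col in header]

print(list(tablize_sketch([])))                    # []
print(list(tablize_sketch([{"a": 1}, {"b": 2}])))  # [['a', 'b'], [1, ''], ['', 2]]
```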
The key for diff --git a/posthog/tasks/scheduled.py b/posthog/tasks/scheduled.py index 0d9628490b788..5972857f7fd2b 100644 --- a/posthog/tasks/scheduled.py +++ b/posthog/tasks/scheduled.py @@ -19,7 +19,6 @@ calculate_cohort, calculate_decide_usage, calculate_external_data_rows_synced, - calculate_replay_embeddings, check_async_migration_health, check_flags_to_rollback, clean_stale_partials, @@ -288,16 +287,6 @@ def setup_periodic_tasks(sender: Celery, **kwargs: Any) -> None: ) if settings.EE_AVAILABLE: - # every interval seconds, we calculate N replay embeddings - # the goal is to process _enough_ every 24 hours that - # there is a meaningful playlist to test with - add_periodic_task_with_expiry( - sender, - settings.REPLAY_EMBEDDINGS_CALCULATION_CELERY_INTERVAL_SECONDS, - calculate_replay_embeddings.s(), - name="calculate replay embeddings", - ) - sender.add_periodic_task( crontab(hour="0", minute=str(randrange(0, 40))), clickhouse_send_license_usage.s(), diff --git a/posthog/tasks/tasks.py b/posthog/tasks/tasks.py index a7e291707914d..7cccde1b31249 100644 --- a/posthog/tasks/tasks.py +++ b/posthog/tasks/tasks.py @@ -910,34 +910,6 @@ def ee_persist_finished_recordings() -> None: persist_finished_recordings() -# this task runs a CH query and triggers other tasks -# it can run on the default queue -@shared_task(ignore_result=True) -def calculate_replay_embeddings() -> None: - try: - from ee.tasks.replay import generate_recordings_embeddings_batch - - generate_recordings_embeddings_batch() - except ImportError: - pass - except Exception as e: - logger.error("Failed to calculate replay embeddings", error=e, exc_info=True) - - -# this task triggers other tasks -# it can run on the default queue -@shared_task(ignore_result=True) -def calculate_replay_error_clusters() -> None: - try: - from ee.tasks.replay import generate_replay_embedding_error_clusters - - generate_replay_embedding_error_clusters() - except ImportError: - pass - except Exception as e: - logger.error("Failed to calculate replay error clusters", error=e, exc_info=True) - - @shared_task(ignore_result=True) def calculate_external_data_rows_synced() -> None: try: diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr index 83f4787642836..2230c532da5ca 100644 --- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr +++ b/posthog/tasks/test/__snapshots__/test_usage_report.ambr @@ -3,7 +3,7 @@ ''' SELECT team_id, - multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', JSONExtractString(properties, '$lib') = 'web', 'web_events', JSONExtractString(properties, '$lib') = 'posthog-js-lite', 'web_lite_events', JSONExtractString(properties, '$lib') = 'posthog-node', 'node_events', JSONExtractString(properties, '$lib') = 'posthog-android', 'android_events', JSONExtractString(properties, '$lib') = 'posthog-flutter', 'flutter_events', JSONExtractString(properties, '$lib') = 'posthog-ios', 'ios_events', JSONExtractString(properties, '$lib') = 'posthog-go', 'go_events', JSONExtractString(properties, '$lib') = 'posthog-java', 'java_events', JSONExtractString(properties, '$lib') = 'posthog-react-native', 'react_native_events', JSONExtractString(properties, '$lib') = 'posthog-ruby', 'ruby_events', JSONExtractString(properties, '$lib') = 'posthog-python', 'python_events', JSONExtractString(properties, '$lib') = 'posthog-php', 'php_events', 
'other') AS metric, + multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', JSONExtractString(properties, '$lib') = 'web', 'web_events', JSONExtractString(properties, '$lib') = 'js', 'web_lite_events', JSONExtractString(properties, '$lib') = 'posthog-node', 'node_events', JSONExtractString(properties, '$lib') = 'posthog-android', 'android_events', JSONExtractString(properties, '$lib') = 'posthog-flutter', 'flutter_events', JSONExtractString(properties, '$lib') = 'posthog-ios', 'ios_events', JSONExtractString(properties, '$lib') = 'posthog-go', 'go_events', JSONExtractString(properties, '$lib') = 'posthog-java', 'java_events', JSONExtractString(properties, '$lib') = 'posthog-react-native', 'react_native_events', JSONExtractString(properties, '$lib') = 'posthog-ruby', 'ruby_events', JSONExtractString(properties, '$lib') = 'posthog-python', 'python_events', JSONExtractString(properties, '$lib') = 'posthog-php', 'php_events', 'other') AS metric, count(1) as count FROM events WHERE timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59' diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index 7b25b2611b9da..4b5edb6672bfc 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -353,7 +353,7 @@ def _create_sample_usage_data(self) -> None: # Add events for each SDK sdks = [ "web", - "posthog-js-lite", + "js", "posthog-node", "posthog-android", "posthog-flutter", diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index ce8e09c2689c2..968354fff3032 100644 --- a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -475,7 +475,7 @@ def get_all_event_metrics_in_period(begin: datetime, end: datetime) -> dict[str, event LIKE 'keywords_ai%%', 'keywords_ai_events', event LIKE 'traceloop%%', 'traceloop_events', {lib_expression} = 'web', 'web_events', - {lib_expression} = 'posthog-js-lite', 'web_lite_events', + {lib_expression} = 'js', 'web_lite_events', {lib_expression} = 'posthog-node', 'node_events', {lib_expression} = 'posthog-android', 'android_events', {lib_expression} = 'posthog-flutter', 'flutter_events', diff --git a/posthog/temporal/batch_exports/bigquery_batch_export.py b/posthog/temporal/batch_exports/bigquery_batch_export.py index e99ba77f3c1bf..ae5a7f58733c9 100644 --- a/posthog/temporal/batch_exports/bigquery_batch_export.py +++ b/posthog/temporal/batch_exports/bigquery_batch_export.py @@ -3,9 +3,7 @@ import contextlib import dataclasses import datetime as dt -import functools import json -import operator import pyarrow as pa import structlog @@ -30,28 +28,26 @@ default_fields, execute_batch_export_insert_activity, get_data_interval, - raise_on_produce_task_failure, start_batch_export_run, - start_produce_batch_export_record_batches, ) from posthog.temporal.batch_exports.heartbeat import ( BatchExportRangeHeartbeatDetails, DateRange, should_resume_from_activity_heartbeat, ) -from posthog.temporal.batch_exports.metrics import ( - get_bytes_exported_metric, - get_rows_exported_metric, +from posthog.temporal.batch_exports.spmc import ( + Consumer, + Producer, + RecordBatchQueue, + run_consumer_loop, + wait_for_schema_or_producer, ) from posthog.temporal.batch_exports.temporary_file import ( - BatchExportWriter, - FlushCallable, - JSONLBatchExportWriter, - ParquetBatchExportWriter, + BatchExportTemporaryFile, + WriterFormat, ) 
from posthog.temporal.batch_exports.utils import ( JsonType, - cast_record_batch_json_columns, set_status_to_running_task, ) from posthog.temporal.common.clickhouse import get_client @@ -60,6 +56,20 @@ logger = structlog.get_logger() +NON_RETRYABLE_ERROR_TYPES = [ + # Raised on missing permissions. + "Forbidden", + # Invalid token. + "RefreshError", + # Usually means the dataset or project doesn't exist. + "NotFound", + # Raised when something about dataset is wrong (not alphanumeric, too long, etc). + "BadRequest", + # Raised when table_id isn't valid. Sadly, `ValueError` is rather generic, but we + # don't anticipate a `ValueError` thrown from our own export code. + "ValueError", +] + def get_bigquery_fields_from_record_schema( record_schema: pa.Schema, known_json_columns: list[str] @@ -346,6 +356,50 @@ def bigquery_default_fields() -> list[BatchExportField]: return batch_export_fields +class BigQueryConsumer(Consumer): + """Implementation of a SPMC pipeline Consumer for BigQuery batch exports.""" + + def __init__( + self, + heartbeater: Heartbeater, + heartbeat_details: BigQueryHeartbeatDetails, + data_interval_start: dt.datetime | str | None, + bigquery_client: BigQueryClient, + bigquery_table: bigquery.Table, + table_schema: list[BatchExportField], + ): + super().__init__(heartbeater, heartbeat_details, data_interval_start) + self.bigquery_client = bigquery_client + self.bigquery_table = bigquery_table + self.table_schema = table_schema + + async def flush( + self, + batch_export_file: BatchExportTemporaryFile, + records_since_last_flush: int, + bytes_since_last_flush: int, + flush_counter: int, + last_date_range: DateRange, + is_last: bool, + error: Exception | None, + ): + """Implement flushing by loading batch export files to BigQuery""" + await self.logger.adebug( + "Loading %s records of size %s bytes to BigQuery table '%s'", + records_since_last_flush, + bytes_since_last_flush, + self.bigquery_table, + ) + + await self.bigquery_client.load_jsonl_file(batch_export_file, self.bigquery_table, self.table_schema) + + await self.logger.adebug("Loaded %s to BigQuery table '%s'", records_since_last_flush, self.bigquery_table) + self.rows_exported_counter.add(records_since_last_flush) + self.bytes_exported_counter.add(bytes_since_last_flush) + + self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start) + + @activity.defn async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> RecordsCompleted: """Activity streams data from ClickHouse to BigQuery.""" @@ -399,43 +453,38 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records ) data_interval_end = dt.datetime.fromisoformat(inputs.data_interval_end) full_range = (data_interval_start, data_interval_end) - queue, produce_task = start_produce_batch_export_record_batches( - client=client, + + queue = RecordBatchQueue(max_size_bytes=settings.BATCH_EXPORT_BIGQUERY_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES) + producer = Producer(clickhouse_client=client) + producer_task = producer.start( + queue=queue, model_name=model_name, is_backfill=inputs.is_backfill, team_id=inputs.team_id, full_range=full_range, done_ranges=done_ranges, - exclude_events=inputs.exclude_events, - include_events=inputs.include_events, fields=fields, destination_default_fields=bigquery_default_fields(), use_latest_schema=True, + exclude_events=inputs.exclude_events, + include_events=inputs.include_events, extra_query_parameters=extra_query_parameters, ) - - get_schema_task = asyncio.create_task(queue.get_schema()) - - 
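The `NON_RETRYABLE_ERROR_TYPES` list introduced above is matched by exception class name: it is handed to Temporal's `non_retryable_error_types` in the workflow, and checked again inside the consumer loop later in this diff. A tiny illustration of that name-based matching:

```python
NON_RETRYABLE = ["Forbidden", "RefreshError", "NotFound", "BadRequest", "ValueError"]

def is_non_retryable(error: Exception) -> bool:
    # Errors are classified by class *name*, mirroring how Temporal consumes
    # non_retryable_error_types, rather than with `except SomeError` clauses.
    return error.__class__.__name__ in NON_RETRYABLE

print(is_non_retryable(ValueError("invalid table_id")))          # True
print(is_non_retryable(TimeoutError("transient network error"))) # False
```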
await asyncio.wait( - [get_schema_task, produce_task], - return_when=asyncio.FIRST_COMPLETED, + records_completed = 0 + + record_batch_schema = await wait_for_schema_or_producer(queue, producer_task) + if record_batch_schema is None: + return records_completed + + record_batch_schema = pa.schema( + # NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other + # record batches have them as nullable. + # Until we figure it out, we set all fields to nullable. There are some fields we know + # are not nullable, but I'm opting for the more flexible option until we out why schemas differ + # between batches. + [field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"] ) - # Finishing producing happens sequentially after putting to queue and setting the schema. - # So, either we finished producing and setting the schema tasks, or we finished without - # putting anything in the queue. - if get_schema_task.done(): - # In the first case, we'll land here. - # The schema is available, and the queue is not empty, so we can start the batch export. - record_batch_schema = get_schema_task.result() - else: - # In the second case, we'll land here: We finished producing without putting anything. - # Since we finished producing with an empty queue, there is nothing to batch export. - # We could have also failed, so we need to re-raise that exception to allow a retry if - # that's the case. - await raise_on_produce_task_failure(produce_task) - return 0 - if inputs.use_json_type is True: json_type = "JSON" json_columns = ["properties", "set", "set_once", "person_properties"] @@ -461,9 +510,6 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records else: schema = get_bigquery_fields_from_record_schema(record_batch_schema, known_json_columns=json_columns) - rows_exported = get_rows_exported_metric() - bytes_exported = get_bytes_exported_metric() - # TODO: Expose this as a configuration parameter # Currently, only allow merging persons model, as it's required. 
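The schema normalisation above (drop the internal `_inserted_at` control column, relax every field to nullable) is plain pyarrow and easy to check in isolation; the field names in this sketch are illustrative:

```python
import pyarrow as pa

# Hypothetical schema as a producer might report it for one record batch.
incoming = pa.schema(
    [
        pa.field("uuid", pa.string(), nullable=False),
        pa.field("properties", pa.string(), nullable=True),
        pa.field("_inserted_at", pa.timestamp("us"), nullable=False),
    ]
)

# Same transformation as above: drop `_inserted_at` and make everything
# nullable, since nullability is not consistent across record batches.
normalized = pa.schema(
    [field.with_nullable(True) for field in incoming if field.name != "_inserted_at"]
)

print(normalized)  # uuid and properties remain, both nullable
```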
# Although all exports could potentially benefit from merging, merging can have an impact on cost, @@ -492,62 +538,23 @@ async def insert_into_bigquery_activity(inputs: BigQueryInsertInputs) -> Records delete=requires_merge, ) as bigquery_stage_table, ): - - async def flush_to_bigquery( - local_results_file, - records_since_last_flush: int, - bytes_since_last_flush: int, - flush_counter: int, - last_date_range, - last: bool, - error: Exception | None, - ): - table = bigquery_stage_table if requires_merge else bigquery_table - await logger.adebug( - "Loading %s records of size %s bytes to BigQuery table '%s'", - records_since_last_flush, - bytes_since_last_flush, - table, - ) - - await bq_client.load_jsonl_file(local_results_file, table, schema) - - await logger.adebug("Loading to BigQuery table '%s' finished", table) - rows_exported.add(records_since_last_flush) - bytes_exported.add(bytes_since_last_flush) - - details.track_done_range(last_date_range, data_interval_start) - heartbeater.set_from_heartbeat_details(details) - - flush_tasks = [] - while not queue.empty() or not produce_task.done(): - await logger.adebug("Starting record batch writer") - flush_start_event = asyncio.Event() - task = asyncio.create_task( - consume_batch_export_record_batches( - queue, - produce_task, - flush_start_event, - flush_to_bigquery, - json_columns, - settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, - ) - ) - - await flush_start_event.wait() - - flush_tasks.append(task) - - await logger.adebug("Finished producing, now waiting on any pending flush tasks") - await asyncio.wait(flush_tasks) - - await raise_on_produce_task_failure(produce_task) - await logger.adebug("Successfully consumed all record batches") - - details.complete_done_ranges(inputs.data_interval_end) - heartbeater.set_from_heartbeat_details(details) - - records_total = functools.reduce(operator.add, (task.result() for task in flush_tasks)) + records_completed = await run_consumer_loop( + queue=queue, + consumer_cls=BigQueryConsumer, + producer_task=producer_task, + heartbeater=heartbeater, + heartbeat_details=details, + data_interval_end=data_interval_end, + data_interval_start=data_interval_start, + schema=record_batch_schema, + writer_format=WriterFormat.JSONL, + max_bytes=settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, + non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, + json_columns=json_columns, + bigquery_client=bq_client, + bigquery_table=bigquery_stage_table if requires_merge else bigquery_table, + table_schema=schema, + ) if requires_merge: merge_key = ( @@ -560,98 +567,7 @@ async def flush_to_bigquery( merge_key=merge_key, ) - return records_total - - -async def consume_batch_export_record_batches( - queue: asyncio.Queue, - produce_task: asyncio.Task, - flush_start_event: asyncio.Event, - flush_to_bigquery: FlushCallable, - json_columns: list[str], - max_bytes: int, -): - """Consume batch export record batches from queue into a writing loop. - - Each record will be written to a temporary file, and flushed after - configured `max_bytes`. Flush is done on context manager exit by - `JSONLBatchExportWriter`. - - This coroutine reports when flushing will start by setting the - `flush_start_event`. This is used by the main thread to start a new writer - task as flushing is about to begin, since that can be too slow to do - sequentially. 
- - If there are not enough events to fill up `max_bytes`, the writing - loop will detect that there are no more events produced and shut itself off - by using the `done_event`, which should be set by the queue producer. - - Arguments: - queue: The queue we will be listening on for record batches. - produce_task: Producer task we check to be done if queue is empty, as - that would indicate we have finished reading record batches before - hitting the flush limit, so we have to break early. - flush_to_start_event: Event set by us when flushing is to about to - start. - json_columns: Used to cast columns of the record batch to JSON. - max_bytes: Max bytes to write before flushing. - - Returns: - Number of total records written and flushed in this task. - """ - writer = JSONLBatchExportWriter( - max_bytes=max_bytes, - flush_callable=flush_to_bigquery, - ) - - async with writer.open_temporary_file(): - await logger.adebug("Starting record batch writing loop") - while True: - try: - record_batch = queue.get_nowait() - except asyncio.QueueEmpty: - if produce_task.done(): - await logger.adebug("Empty queue with no more events being produced, closing writer loop") - flush_start_event.set() - # Exit context manager to trigger flush - break - else: - await asyncio.sleep(0.1) - continue - - record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) - await writer.write_record_batch(record_batch, flush=False) - - if writer.should_flush(): - await logger.adebug("Writer finished, ready to flush events") - flush_start_event.set() - # Exit context manager to trigger flush - break - - await logger.adebug("Completed %s records", writer.records_total) - return writer.records_total - - -def get_batch_export_writer( - inputs: BigQueryInsertInputs, flush_callable: FlushCallable, max_bytes: int, schema: pa.Schema | None = None -) -> BatchExportWriter: - """Return the `BatchExportWriter` corresponding to the inputs for this BigQuery batch export.""" - writer: BatchExportWriter - - if inputs.use_json_type is False: - # JSON field is not supported with Parquet - writer = ParquetBatchExportWriter( - max_bytes=max_bytes, - flush_callable=flush_callable, - schema=schema, - ) - else: - writer = JSONLBatchExportWriter( - max_bytes=settings.BATCH_EXPORT_BIGQUERY_UPLOAD_CHUNK_SIZE_BYTES, - flush_callable=flush_callable, - ) - - return writer + return records_completed @workflow.defn(name="bigquery-export", failure_exception_types=[workflow.NondeterminismError]) @@ -729,18 +645,6 @@ async def run(self, inputs: BigQueryBatchExportInputs): insert_into_bigquery_activity, insert_inputs, interval=inputs.interval, - non_retryable_error_types=[ - # Raised on missing permissions. - "Forbidden", - # Invalid token. - "RefreshError", - # Usually means the dataset or project doesn't exist. - "NotFound", - # Raised when something about dataset is wrong (not alphanumeric, too long, etc). - "BadRequest", - # Raised when table_id isn't valid. Sadly, `ValueError` is rather generic, but we - # don't anticipate a `ValueError` thrown from our own export code. 
- "ValueError", - ], + non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/redshift_batch_export.py b/posthog/temporal/batch_exports/redshift_batch_export.py index d9d634d78858c..3b02efddb5a0b 100644 --- a/posthog/temporal/batch_exports/redshift_batch_export.py +++ b/posthog/temporal/batch_exports/redshift_batch_export.py @@ -32,6 +32,11 @@ start_batch_export_run, start_produce_batch_export_record_batches, ) +from posthog.temporal.batch_exports.heartbeat import ( + BatchExportRangeHeartbeatDetails, + DateRange, + should_resume_from_activity_heartbeat, +) from posthog.temporal.batch_exports.metrics import get_rows_exported_metric from posthog.temporal.batch_exports.postgres_batch_export import ( Fields, @@ -47,11 +52,6 @@ from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater from posthog.temporal.common.logger import configure_temporal_worker_logger -from posthog.temporal.batch_exports.heartbeat import ( - BatchExportRangeHeartbeatDetails, - DateRange, - should_resume_from_activity_heartbeat, -) def remove_escaped_whitespace_recursive(value): @@ -715,6 +715,8 @@ async def run(self, inputs: RedshiftBatchExportInputs): "StringDataRightTruncation", # Raised by our PostgreSQL client when failing to connect after several attempts. "PostgreSQLConnectionError", + # Column missing in Redshift, likely the schema was altered. + "UndefinedColumn", ], finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/s3_batch_export.py b/posthog/temporal/batch_exports/s3_batch_export.py index 41383c8e17114..7201af91d2b5a 100644 --- a/posthog/temporal/batch_exports/s3_batch_export.py +++ b/posthog/temporal/batch_exports/s3_batch_export.py @@ -1,12 +1,13 @@ import asyncio +import collections.abc import contextlib import dataclasses import datetime as dt import io import json +import operator import posixpath import typing -import collections.abc import aioboto3 import botocore.exceptions @@ -30,36 +31,62 @@ default_fields, execute_batch_export_insert_activity, get_data_interval, - iter_model_records, start_batch_export_run, wait_for_delta_past_data_interval_end, ) -from posthog.temporal.batch_exports.metrics import ( - get_bytes_exported_metric, - get_rows_exported_metric, +from posthog.temporal.batch_exports.heartbeat import ( + BatchExportRangeHeartbeatDetails, + DateRange, + HeartbeatParseError, + should_resume_from_activity_heartbeat, +) +from posthog.temporal.batch_exports.spmc import ( + Consumer, + Producer, + RecordBatchQueue, + run_consumer_loop, + wait_for_schema_or_producer, ) from posthog.temporal.batch_exports.temporary_file import ( BatchExportTemporaryFile, - BatchExportWriter, - FlushCallable, - JSONLBatchExportWriter, - ParquetBatchExportWriter, - UnsupportedFileFormatError, + WriterFormat, ) from posthog.temporal.batch_exports.utils import ( - apeek_first_and_rewind, - cast_record_batch_json_columns, set_status_to_running_task, ) from posthog.temporal.common.clickhouse import get_client from posthog.temporal.common.heartbeat import Heartbeater from posthog.temporal.common.logger import bind_temporal_worker_logger -from posthog.temporal.batch_exports.heartbeat import ( - BatchExportRangeHeartbeatDetails, - DateRange, - HeartbeatParseError, - should_resume_from_activity_heartbeat, -) + +NON_RETRYABLE_ERROR_TYPES = [ + # S3 parameter validation failed. 
+ "ParamValidationError", + # This error usually indicates credentials are incorrect or permissions are missing. + "ClientError", + # An S3 bucket doesn't exist. + "NoSuchBucket", + # Couldn't connect to custom S3 endpoint + "EndpointConnectionError", + # Input contained an empty S3 endpoint URL + "EmptyS3EndpointURLError", + # User provided an invalid S3 key + "InvalidS3Key", + # All consumers failed with non-retryable errors. + "RecordBatchConsumerNonRetryableExceptionGroup", +] + +FILE_FORMAT_EXTENSIONS = { + "Parquet": "parquet", + "JSONLines": "jsonl", +} + +COMPRESSION_EXTENSIONS = { + "gzip": "gz", + "snappy": "sz", + "brotli": "br", + "ztsd": "zst", + "lz4": "lz4", +} def get_allowed_template_variables(inputs) -> dict[str, str]: @@ -78,20 +105,6 @@ def get_allowed_template_variables(inputs) -> dict[str, str]: } -FILE_FORMAT_EXTENSIONS = { - "Parquet": "parquet", - "JSONLines": "jsonl", -} - -COMPRESSION_EXTENSIONS = { - "gzip": "gz", - "snappy": "sz", - "brotli": "br", - "ztsd": "zst", - "lz4": "lz4", -} - - def get_s3_key(inputs) -> str: """Return an S3 key given S3InsertInputs.""" template_variables = get_allowed_template_variables(inputs) @@ -199,6 +212,7 @@ def __init__( self.kms_key_id = kms_key_id self.upload_id: str | None = None self.parts: list[Part] = [] + self.pending_parts: list[Part] = [] if self.endpoint_url == "": raise EmptyS3EndpointURLError() @@ -214,7 +228,7 @@ def to_state(self) -> S3MultiPartUploadState: @property def part_number(self): """Return the current part number.""" - return len(self.parts) + return len(self.parts) + len(self.pending_parts) def is_upload_in_progress(self) -> bool: """Whether this S3MultiPartUpload is in progress or not.""" @@ -272,12 +286,13 @@ async def complete(self) -> str: if self.is_upload_in_progress() is False: raise NoUploadInProgressError() + sorted_parts = sorted(self.parts, key=operator.itemgetter("PartNumber")) async with self.s3_client() as s3_client: response = await s3_client.complete_multipart_upload( Bucket=self.bucket_name, Key=self.key, UploadId=self.upload_id, - MultipartUpload={"Parts": self.parts}, + MultipartUpload={"Parts": sorted_parts}, ) self.upload_id = None @@ -311,6 +326,8 @@ async def upload_part( ): """Upload a part of this multi-part upload.""" next_part_number = self.part_number + 1 + part = {"PartNumber": next_part_number, "ETag": ""} + self.pending_parts.append(part) if rewind is True: body.rewind() @@ -335,7 +352,9 @@ async def upload_part( finally: reader.detach() # BufferedReader closes the file otherwise. 
- self.parts.append({"PartNumber": next_part_number, "ETag": etag}) + self.pending_parts.pop(self.pending_parts.index(part)) + part["ETag"] = etag + self.parts.append(part) async def upload_part_retryable( self, @@ -441,6 +460,52 @@ def append_upload_state(self, upload_state: S3MultiPartUploadState): self.upload_state.parts.append(part) +class S3Consumer(Consumer): + def __init__( + self, + heartbeater: Heartbeater, + heartbeat_details: S3HeartbeatDetails, + data_interval_start: dt.datetime | str | None, + s3_upload: S3MultiPartUpload, + ): + super().__init__(heartbeater, heartbeat_details, data_interval_start) + self.heartbeat_details: S3HeartbeatDetails = heartbeat_details + self.s3_upload = s3_upload + + async def flush( + self, + batch_export_file: BatchExportTemporaryFile, + records_since_last_flush: int, + bytes_since_last_flush: int, + flush_counter: int, + last_date_range: DateRange, + is_last: bool, + error: Exception | None, + ): + if error is not None: + await self.logger.adebug("Error while writing part %d", self.s3_upload.part_number + 1, exc_info=error) + await self.logger.awarning( + "An error was detected while writing part %d. Partial part will not be uploaded in case it can be retried.", + self.s3_upload.part_number + 1, + ) + return + + await self.logger.adebug( + "Uploading part %s containing %s records with size %s bytes", + self.s3_upload.part_number + 1, + records_since_last_flush, + bytes_since_last_flush, + ) + + await self.s3_upload.upload_part(batch_export_file) + + self.rows_exported_counter.add(records_since_last_flush) + self.bytes_exported_counter.add(bytes_since_last_flush) + + self.heartbeat_details.track_done_range(last_date_range, self.data_interval_start) + self.heartbeat_details.append_upload_state(self.s3_upload.to_state()) + + @dataclasses.dataclass class S3InsertInputs: """Inputs for S3 exports.""" @@ -576,143 +641,85 @@ async def insert_into_s3_activity(inputs: S3InsertInputs) -> RecordsCompleted: raise ConnectionError("Cannot establish connection to ClickHouse") s3_upload, details = await initialize_and_resume_multipart_upload(inputs) - - # TODO: Switch to single-producer multiple consumer done_ranges: list[DateRange] = details.done_ranges - if done_ranges: - data_interval_start: str | None = done_ranges[-1][1].isoformat() - else: - data_interval_start = inputs.data_interval_start model: BatchExportModel | BatchExportSchema | None = None if inputs.batch_export_schema is None and "batch_export_model" in { field.name for field in dataclasses.fields(inputs) }: model = inputs.batch_export_model + if model is not None: + model_name = model.name + extra_query_parameters = model.schema["values"] if model.schema is not None else None + fields = model.schema["fields"] if model.schema is not None else None + else: + model_name = "events" + extra_query_parameters = None + fields = None else: model = inputs.batch_export_schema + model_name = "custom" + extra_query_parameters = model["values"] if model is not None else {} + fields = model["fields"] if model is not None else None - record_iterator = iter_model_records( - model=model, - client=client, + data_interval_start = ( + dt.datetime.fromisoformat(inputs.data_interval_start) if inputs.data_interval_start else None + ) + data_interval_end = dt.datetime.fromisoformat(inputs.data_interval_end) + full_range = (data_interval_start, data_interval_end) + + queue = RecordBatchQueue(max_size_bytes=settings.BATCH_EXPORT_S3_RECORD_BATCH_QUEUE_MAX_SIZE_BYTES) + producer = Producer(clickhouse_client=client) + 
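The new `pending_parts` bookkeeping, together with the `sorted(...)` added in `complete()`, exists because flushes can now overlap: a part number must be reserved before its upload starts so a concurrent flush doesn't reuse it, and parts may therefore finish out of order. A self-contained sketch of just that state machine (names are illustrative):

```python
import operator

class MultipartStateSketch:
    def __init__(self) -> None:
        self.parts: list[dict] = []
        self.pending_parts: list[dict] = []

    @property
    def part_number(self) -> int:
        # Count reserved-but-unfinished parts too, so concurrent uploads never
        # hand out the same number twice.
        return len(self.parts) + len(self.pending_parts)

    def reserve_part(self) -> dict:
        part = {"PartNumber": self.part_number + 1, "ETag": ""}
        self.pending_parts.append(part)
        return part

    def finish_part(self, part: dict, etag: str) -> None:
        self.pending_parts.remove(part)
        part["ETag"] = etag
        self.parts.append(part)

    def parts_for_complete(self) -> list[dict]:
        # Parts may complete out of order; S3 requires them sorted by PartNumber.
        return sorted(self.parts, key=operator.itemgetter("PartNumber"))

state = MultipartStateSketch()
first, second = state.reserve_part(), state.reserve_part()
state.finish_part(second, etag="etag-2")  # the second part finishes first
state.finish_part(first, etag="etag-1")
print(state.parts_for_complete())
# [{'PartNumber': 1, 'ETag': 'etag-1'}, {'PartNumber': 2, 'ETag': 'etag-2'}]
```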
producer_task = producer.start( + queue=queue, + model_name=model_name, + is_backfill=inputs.is_backfill, team_id=inputs.team_id, - interval_start=data_interval_start, - interval_end=inputs.data_interval_end, + full_range=full_range, + done_ranges=done_ranges, + fields=fields, + destination_default_fields=s3_default_fields(), exclude_events=inputs.exclude_events, include_events=inputs.include_events, - is_backfill=inputs.is_backfill, - destination_default_fields=s3_default_fields(), + extra_query_parameters=extra_query_parameters, ) - - first_record_batch, record_iterator = await apeek_first_and_rewind(record_iterator) - records_completed = 0 - if first_record_batch is None: - return records_completed - async with s3_upload as s3_upload: + record_batch_schema = await wait_for_schema_or_producer(queue, producer_task) + if record_batch_schema is None: + return records_completed - async def flush_to_s3( - local_results_file, - records_since_last_flush: int, - bytes_since_last_flush: int, - flush_counter: int, - last_date_range: DateRange, - last: bool, - error: Exception | None, - ): - if error is not None: - await logger.adebug("Error while writing part %d", s3_upload.part_number + 1, exc_info=error) - await logger.awarning( - "An error was detected while writing part %d. Partial part will not be uploaded in case it can be retried.", - s3_upload.part_number + 1, - ) - return - - await logger.adebug( - "Uploading %s part %s containing %s records with size %s bytes", - "last " if last else "", - s3_upload.part_number + 1, - records_since_last_flush, - bytes_since_last_flush, - ) - - await s3_upload.upload_part(local_results_file) - - rows_exported.add(records_since_last_flush) - bytes_exported.add(bytes_since_last_flush) - - details.track_done_range(last_date_range, data_interval_start) - details.append_upload_state(s3_upload.to_state()) - heartbeater.set_from_heartbeat_details(details) - - first_record_batch = cast_record_batch_json_columns(first_record_batch) - column_names = first_record_batch.column_names - column_names.pop(column_names.index("_inserted_at")) - - schema = pa.schema( - # NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other - # record batches have them as nullable. - # Until we figure it out, we set all fields to nullable. There are some fields we know - # are not nullable, but I'm opting for the more flexible option until we out why schemas differ - # between batches. - [field.with_nullable(True) for field in first_record_batch.select(column_names).schema] - ) + record_batch_schema = pa.schema( + # NOTE: For some reason, some batches set non-nullable fields as non-nullable, whereas other + # record batches have them as nullable. + # Until we figure it out, we set all fields to nullable. There are some fields we know + # are not nullable, but I'm opting for the more flexible option until we out why schemas differ + # between batches. 
+ [field.with_nullable(True) for field in record_batch_schema if field.name != "_inserted_at"] + ) - writer = get_batch_export_writer( - inputs, - flush_callable=flush_to_s3, + async with s3_upload as s3_upload: + records_completed = await run_consumer_loop( + queue=queue, + consumer_cls=S3Consumer, + producer_task=producer_task, + heartbeater=heartbeater, + heartbeat_details=details, + data_interval_end=data_interval_end, + data_interval_start=data_interval_start, + schema=record_batch_schema, + writer_format=WriterFormat.from_str(inputs.file_format, "S3"), max_bytes=settings.BATCH_EXPORT_S3_UPLOAD_CHUNK_SIZE_BYTES, - schema=schema, + s3_upload=s3_upload, + writer_file_kwargs={"compression": inputs.compression}, + non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, ) - async with writer.open_temporary_file(): - rows_exported = get_rows_exported_metric() - bytes_exported = get_bytes_exported_metric() - - async for record_batch in record_iterator: - record_batch = cast_record_batch_json_columns(record_batch) - - await writer.write_record_batch(record_batch) - - details.complete_done_ranges(inputs.data_interval_end) - heartbeater.set_from_heartbeat_details(details) - - records_completed = writer.records_total await s3_upload.complete() return records_completed -def get_batch_export_writer( - inputs: S3InsertInputs, flush_callable: FlushCallable, max_bytes: int, schema: pa.Schema | None = None -) -> BatchExportWriter: - """Return the `BatchExportWriter` corresponding to configured `file_format`. - - Raises: - UnsupportedFileFormatError: If no writer exists for given `file_format`. - """ - writer: BatchExportWriter - - if inputs.file_format == "Parquet": - writer = ParquetBatchExportWriter( - max_bytes=max_bytes, - flush_callable=flush_callable, - compression=inputs.compression, - schema=schema, - ) - elif inputs.file_format == "JSONLines": - writer = JSONLBatchExportWriter( - max_bytes=max_bytes, - flush_callable=flush_callable, - compression=inputs.compression, - ) - else: - raise UnsupportedFileFormatError(inputs.file_format, "S3") - - return writer - - @workflow.defn(name="s3-export", failure_exception_types=[workflow.NondeterminismError]) class S3BatchExportWorkflow(PostHogWorkflow): """A Temporal Workflow to export ClickHouse data into S3. @@ -789,19 +796,6 @@ async def run(self, inputs: S3BatchExportInputs): insert_into_s3_activity, insert_inputs, interval=inputs.interval, - non_retryable_error_types=[ - # S3 parameter validation failed. - "ParamValidationError", - # This error usually indicates credentials are incorrect or permissions are missing. - "ClientError", - # An S3 bucket doesn't exist. 
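Both the BigQuery and S3 activities now hand off to `run_consumer_loop` from the new `spmc` module. The essence of that loop is that it starts a consumer, waits only until that consumer signals its flush has begun (not until it finishes), then starts the next one, and finally awaits whatever is still pending. A toy sketch of that hand-off, with made-up timings and record counts:

```python
import asyncio

async def consumer(flush_started: asyncio.Event) -> int:
    # Hypothetical consumer: "reads" for a bit, then signals that its flush has
    # begun so the loop can start the next consumer while this one uploads.
    await asyncio.sleep(0.01)  # pretend to fill a temporary file
    flush_started.set()
    await asyncio.sleep(0.05)  # pretend to upload/flush the file
    return 100                 # records written by this consumer

async def run_loop(num_batches: int) -> int:
    pending: set[asyncio.Task[int]] = set()
    for _ in range(num_batches):
        flush_started = asyncio.Event()
        task = asyncio.create_task(consumer(flush_started))
        pending.add(task)
        # Block only until this consumer starts flushing, not until it finishes,
        # so flushes overlap instead of running strictly one after another.
        await flush_started.wait()
    done, _ = await asyncio.wait(pending)
    return sum(t.result() for t in done)

print(asyncio.run(run_loop(3)))  # 300
```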
- "NoSuchBucket", - # Couldn't connect to custom S3 endpoint - "EndpointConnectionError", - # Input contained an empty S3 endpoint URL - "EmptyS3EndpointURLError", - # User provided an invalid S3 key - "InvalidS3Key", - ], + non_retryable_error_types=NON_RETRYABLE_ERROR_TYPES, finish_inputs=finish_inputs, ) diff --git a/posthog/temporal/batch_exports/spmc.py b/posthog/temporal/batch_exports/spmc.py new file mode 100644 index 0000000000000..34a503646a3e4 --- /dev/null +++ b/posthog/temporal/batch_exports/spmc.py @@ -0,0 +1,631 @@ +import abc +import asyncio +import collections.abc +import datetime as dt +import operator +import typing +import uuid + +import pyarrow as pa +import structlog +import temporalio.common +from django.conf import settings + +from posthog.temporal.batch_exports.heartbeat import BatchExportRangeHeartbeatDetails +from posthog.temporal.batch_exports.metrics import get_bytes_exported_metric, get_rows_exported_metric +from posthog.temporal.batch_exports.sql import ( + SELECT_FROM_EVENTS_VIEW, + SELECT_FROM_EVENTS_VIEW_BACKFILL, + SELECT_FROM_EVENTS_VIEW_RECENT, + SELECT_FROM_EVENTS_VIEW_UNBOUNDED, + SELECT_FROM_PERSONS_VIEW, + SELECT_FROM_PERSONS_VIEW_BACKFILL, + SELECT_FROM_PERSONS_VIEW_BACKFILL_NEW, + SELECT_FROM_PERSONS_VIEW_NEW, +) +from posthog.temporal.batch_exports.temporary_file import ( + BatchExportTemporaryFile, + BytesSinceLastFlush, + DateRange, + FlushCounter, + IsLast, + RecordsSinceLastFlush, + WriterFormat, + get_batch_export_writer, +) +from posthog.temporal.batch_exports.utils import ( + cast_record_batch_json_columns, + cast_record_batch_schema_json_columns, +) +from posthog.temporal.common.clickhouse import ClickHouseClient +from posthog.temporal.common.heartbeat import Heartbeater + +logger = structlog.get_logger() + + +class RecordBatchQueue(asyncio.Queue): + """A queue of pyarrow RecordBatch instances limited by bytes.""" + + def __init__(self, max_size_bytes: int = 0) -> None: + super().__init__(maxsize=max_size_bytes) + self._bytes_size = 0 + self._schema_set = asyncio.Event() + self.record_batch_schema = None + # This is set by `asyncio.Queue.__init__` calling `_init` + self._queue: collections.deque + + def _get(self) -> pa.RecordBatch: + """Override parent `_get` to keep track of bytes.""" + item = self._queue.popleft() + self._bytes_size -= item.get_total_buffer_size() + return item + + def _put(self, item: pa.RecordBatch) -> None: + """Override parent `_put` to keep track of bytes.""" + self._bytes_size += item.get_total_buffer_size() + + if not self._schema_set.is_set(): + self.set_schema(item) + + self._queue.append(item) + + def set_schema(self, record_batch: pa.RecordBatch) -> None: + """Used to keep track of schema of events in queue.""" + self.record_batch_schema = record_batch.schema + self._schema_set.set() + + async def get_schema(self) -> pa.Schema: + """Return the schema of events in queue. + + Currently, this is not enforced. It's purely for reporting to users of + the queue what do the record batches look like. It's up to the producer + to ensure all record batches have the same schema. + """ + await self._schema_set.wait() + return self.record_batch_schema + + def qsize(self) -> int: + """Size in bytes of record batches in the queue. + + This is used to determine when the queue is full, so it returns the + number of bytes. 
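The `RecordBatchQueue` defined above bounds itself by bytes rather than item count, by overriding `_get`/`_put` and reporting the total buffer size from `qsize()`. A small usage sketch; it assumes the `posthog.temporal.batch_exports.spmc` module added in this diff is importable, and note that the default `max_size_bytes=0` makes the queue unbounded, since `asyncio.Queue` treats a non-positive maxsize as unlimited:

```python
import asyncio
import pyarrow as pa

from posthog.temporal.batch_exports.spmc import RecordBatchQueue

async def main() -> None:
    batch = pa.RecordBatch.from_pydict({"event": ["$pageview"] * 1000})

    unbounded = RecordBatchQueue()  # max_size_bytes=0 -> treated as unlimited
    await unbounded.put(batch)
    print("bytes queued:", unbounded.qsize())        # total buffer size, not item count
    print("schema:", await unbounded.get_schema())   # set by the first put

    bounded = RecordBatchQueue(max_size_bytes=1)
    await bounded.put(batch)  # the first put always succeeds
    assert bounded.full()     # a second put would block until a batch is consumed

asyncio.run(main())
```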
+ """ + return self._bytes_size + + +class TaskNotDoneError(Exception): + """Raised when a task that should be done, isn't.""" + + def __init__(self, task: str): + super().__init__(f"Expected task '{task}' to be done by now") + + +class RecordBatchTaskError(Exception): + """Raised when an error occurs during consumption of record batches.""" + + def __init__(self): + super().__init__("The record batch consumer encountered an error during execution") + + +async def raise_on_task_failure(task: asyncio.Task) -> None: + """Raise `RecordBatchProducerError` if a producer task failed. + + We will also raise a `TaskNotDone` if the producer is not done, as this + should only be called after producer is done to check its exception. + """ + if not task.done(): + raise TaskNotDoneError(task.get_name()) + + if task.exception() is None: + return + + exc = task.exception() + await logger.aexception("%s task failed", task.get_name(), exc_info=exc) + raise RecordBatchTaskError() from exc + + +async def wait_for_schema_or_producer(queue: RecordBatchQueue, producer_task: asyncio.Task) -> pa.Schema | None: + """Wait for a queue schema to be set or a producer to finish. + + If the queue's schema is set first, we will return that, otherwise we return + `None`. + + A queue's schema will be set sequentially on the first record batch produced. + So, after waiting for both tasks, either we finished setting the schema and + have partially or fully produced record batches, or we finished without putting + anything in the queue, and the queue's schema has not been set. + """ + record_batch_schema = None + + get_schema_task = asyncio.create_task(queue.get_schema()) + + await asyncio.wait( + [get_schema_task, producer_task], + return_when=asyncio.FIRST_COMPLETED, + ) + + if get_schema_task.done(): + # The schema is available, and the queue is not empty, so we can continue + # with the rest of the the batch export. + record_batch_schema = get_schema_task.result() + else: + # We finished producing without putting anything in the queue and there is + # nothing to batch export. We could have also failed, so we need to re-raise + # that exception to allow a retry if that's the case. If we don't fail, it + # is safe to finish the batch export early. + await raise_on_task_failure(producer_task) + + return record_batch_schema + + +class Consumer: + """Async consumer for batch exports. + + Attributes: + flush_start_event: Event set when this consumer's flush method starts. + heartbeater: A batch export's heartbeater used for tracking progress. + heartbeat_details: A batch export's heartbeat details passed to the + heartbeater used for tracking progress. + data_interval_start: The beginning of the batch export period. + logger: Provided consumer logger. 
+ """ + + def __init__( + self, + heartbeater: Heartbeater, + heartbeat_details: BatchExportRangeHeartbeatDetails, + data_interval_start: dt.datetime | str | None, + ): + self.flush_start_event = asyncio.Event() + self.heartbeater = heartbeater + self.heartbeat_details = heartbeat_details + self.data_interval_start = data_interval_start + self.logger = logger + + @property + def rows_exported_counter(self) -> temporalio.common.MetricCounter: + """Access the rows exported metric counter.""" + return get_rows_exported_metric() + + @property + def bytes_exported_counter(self) -> temporalio.common.MetricCounter: + """Access the bytes exported metric counter.""" + return get_bytes_exported_metric() + + @abc.abstractmethod + async def flush( + self, + batch_export_file: BatchExportTemporaryFile, + records_since_last_flush: RecordsSinceLastFlush, + bytes_since_last_flush: BytesSinceLastFlush, + flush_counter: FlushCounter, + last_date_range: DateRange, + is_last: IsLast, + error: Exception | None, + ): + """Method called on reaching `max_bytes` when running the consumer. + + Each batch export should override this method with their own implementation + of flushing, as each destination will have different requirements for + flushing data. + + Arguments: + batch_export_file: The temporary file containing data to flush. + records_since_last_flush: How many records were written in the temporary + file. + bytes_since_last_flush: How many records were written in the temporary + file. + error: If any error occurs while writing the temporary file. + """ + pass + + async def start( + self, + queue: RecordBatchQueue, + producer_task: asyncio.Task, + writer_format: WriterFormat, + max_bytes: int, + schema: pa.Schema, + json_columns: collections.abc.Sequence[str], + **kwargs, + ) -> int: + """Start consuming record batches from queue. + + Record batches will be written to a temporary file defined by `writer_format` + and the file will be flushed upon reaching at least `max_bytes`. + + Returns: + Total number of records in all consumed record batches. 
+ """ + await logger.adebug("Starting record batch consumer") + + schema = cast_record_batch_schema_json_columns(schema, json_columns=json_columns) + writer = get_batch_export_writer(writer_format, self.flush, schema=schema, max_bytes=max_bytes, **kwargs) + + record_batches_count = 0 + + async with writer.open_temporary_file(): + await self.logger.adebug("Starting record batch writing loop") + while True: + try: + record_batch = queue.get_nowait() + record_batches_count += 1 + except asyncio.QueueEmpty: + if producer_task.done(): + await self.logger.adebug( + "Empty queue with no more events being produced, closing writer loop and flushing" + ) + self.flush_start_event.set() + # Exit context manager to trigger flush + break + else: + await asyncio.sleep(0.1) + continue + + record_batch = cast_record_batch_json_columns(record_batch, json_columns=json_columns) + await writer.write_record_batch(record_batch, flush=False) + + if writer.should_flush(): + await self.logger.adebug("Writer finished, ready to flush events") + self.flush_start_event.set() + # Exit context manager to trigger flush + break + + for _ in range(record_batches_count): + queue.task_done() + + await self.logger.adebug("Consumed %s records", writer.records_total) + self.heartbeater.set_from_heartbeat_details(self.heartbeat_details) + return writer.records_total + + +class RecordBatchConsumerRetryableExceptionGroup(ExceptionGroup): + """ExceptionGroup raised when at least one task fails with a retryable exception.""" + + def derive(self, excs): + return RecordBatchConsumerRetryableExceptionGroup(self.message, excs) + + +class RecordBatchConsumerNonRetryableExceptionGroup(ExceptionGroup): + """ExceptionGroup raised when all tasks fail with non-retryable exception.""" + + def derive(self, excs): + return RecordBatchConsumerNonRetryableExceptionGroup(self.message, excs) + + +async def run_consumer_loop( + queue: RecordBatchQueue, + consumer_cls: type[Consumer], + producer_task: asyncio.Task, + heartbeater: Heartbeater, + heartbeat_details: BatchExportRangeHeartbeatDetails, + data_interval_end: dt.datetime | str, + data_interval_start: dt.datetime | str | None, + schema: pa.Schema, + writer_format: WriterFormat, + max_bytes: int, + json_columns: collections.abc.Sequence[str] = ("properties", "person_properties", "set", "set_once"), + writer_file_kwargs: collections.abc.Mapping[str, typing.Any] | None = None, + non_retryable_error_types: collections.abc.Sequence[str] = (), + **kwargs, +) -> int: + """Run record batch consumers in a loop. + + When a consumer starts flushing, a new consumer will be started, and so on in + a loop. Once there is nothing left to consumer from the `RecordBatchQueue`, no + more consumers will be started, and any pending consumers are awaited. + + Returns: + Number of records exported. Not the number of record batches, but the + number of records in all record batches. + + Raises: + RecordBatchConsumerRetryableExceptionGroup: When at least one consumer task + fails with a retryable error. + RecordBatchConsumerNonRetryableExceptionGroup: When all consumer tasks fail + with non-retryable errors. 
+ """ + consumer_tasks_pending: set[asyncio.Task] = set() + consumer_tasks_done = set() + consumer_number = 0 + records_completed = 0 + + def consumer_done_callback(task: asyncio.Task): + nonlocal records_completed + nonlocal consumer_tasks_done + nonlocal consumer_tasks_pending + + try: + records_completed += task.result() + except: + pass + + consumer_tasks_pending.remove(task) + consumer_tasks_done.add(task) + + await logger.adebug("Starting record batch consumer loop") + while not queue.empty() or not producer_task.done(): + consumer = consumer_cls(heartbeater, heartbeat_details, data_interval_start, **kwargs) + consumer_task = asyncio.create_task( + consumer.start( + queue=queue, + producer_task=producer_task, + writer_format=writer_format, + max_bytes=max_bytes, + schema=schema, + json_columns=json_columns, + **writer_file_kwargs or {}, + ), + name=f"record_batch_consumer_{consumer_number}", + ) + consumer_tasks_pending.add(consumer_task) + consumer_task.add_done_callback(consumer_done_callback) + consumer_number += 1 + + while not consumer.flush_start_event.is_set() and not consumer_task.done(): + # Block until we either start flushing or we are done consuming. + # Flush start should always happen first unless the consumer task fails. + await asyncio.sleep(0) + + if consumer_task.done(): + consumer_task_exception = consumer_task.exception() + + if consumer_task_exception is not None: + raise consumer_task_exception + + await logger.adebug("Finished producing, now waiting on any pending consumer tasks") + if consumer_tasks_pending: + await asyncio.wait(consumer_tasks_pending) + + retryable = [] + non_retryable = [] + for task in consumer_tasks_done: + try: + await raise_on_task_failure(task) + + except Exception as e: + # TODO: Handle exception types instead of checking for exception names. + # We are losing some precision by not handling exception types with + # `except`, but using a sequence of strings keeps us in line with + # Temporal. Not a good reason though, but right now we would need to + # search for a handful of exception types, so this is a quicker tradeoff + # as we already have the list of strings for each destination. + if e.__class__.__name__ in non_retryable_error_types: + await logger.aexception("Consumer task %s has failed with a non-retryable %s", task, e, exc_info=e) + non_retryable.append(e) + + else: + await logger.aexception("Consumer task %s has failed with a retryable %s", task, e, exc_info=e) + retryable.append(e) + + if retryable: + raise RecordBatchConsumerRetryableExceptionGroup( + "At least one unhandled retryable errors in a RecordBatch consumer TaskGroup", retryable + non_retryable + ) + elif non_retryable: + raise RecordBatchConsumerNonRetryableExceptionGroup( + "Unhandled non-retryable errors in a RecordBatch consumer TaskGroup", retryable + non_retryable + ) + + await raise_on_task_failure(producer_task) + await logger.adebug("Successfully consumed all record batches") + + heartbeat_details.complete_done_ranges(data_interval_end) + heartbeater.set_from_heartbeat_details(heartbeat_details) + + return records_completed + + +class BatchExportField(typing.TypedDict): + """A field to be queried from ClickHouse. + + Attributes: + expression: A ClickHouse SQL expression that declares the field required. + alias: An alias to apply to the expression (after an 'AS' keyword). 
+ """ + + expression: str + alias: str + + +def default_fields() -> list[BatchExportField]: + """Return list of default batch export Fields.""" + return [ + BatchExportField(expression="uuid", alias="uuid"), + BatchExportField(expression="team_id", alias="team_id"), + BatchExportField(expression="timestamp", alias="timestamp"), + BatchExportField(expression="_inserted_at", alias="_inserted_at"), + BatchExportField(expression="created_at", alias="created_at"), + BatchExportField(expression="event", alias="event"), + BatchExportField(expression="properties", alias="properties"), + BatchExportField(expression="distinct_id", alias="distinct_id"), + BatchExportField(expression="set", alias="set"), + BatchExportField( + expression="set_once", + alias="set_once", + ), + ] + + +class Producer: + """Async producer for batch exports. + + Attributes: + clickhouse_client: ClickHouse client used to produce RecordBatches. + _task: Used to keep track of producer background task. + """ + + def __init__(self, clickhouse_client: ClickHouseClient): + self.clickhouse_client = clickhouse_client + self._task: asyncio.Task | None = None + + @property + def task(self) -> asyncio.Task: + if self._task is None: + raise ValueError("Producer task is not initialized, have you called `Producer.start()`?") + return self._task + + def start( + self, + queue: RecordBatchQueue, + model_name: str, + is_backfill: bool, + team_id: int, + full_range: tuple[dt.datetime | None, dt.datetime], + done_ranges: list[tuple[dt.datetime, dt.datetime]], + fields: list[BatchExportField] | None = None, + destination_default_fields: list[BatchExportField] | None = None, + use_latest_schema: bool = False, + **parameters, + ) -> asyncio.Task: + if fields is None: + if destination_default_fields is None: + fields = default_fields() + else: + fields = destination_default_fields + + if model_name == "persons": + if is_backfill and full_range[0] is None: + if use_latest_schema: + query = SELECT_FROM_PERSONS_VIEW_BACKFILL_NEW + else: + query = SELECT_FROM_PERSONS_VIEW_BACKFILL + else: + if use_latest_schema: + query = SELECT_FROM_PERSONS_VIEW_NEW + else: + query = SELECT_FROM_PERSONS_VIEW + else: + if parameters.get("exclude_events", None): + parameters["exclude_events"] = list(parameters["exclude_events"]) + else: + parameters["exclude_events"] = [] + + if parameters.get("include_events", None): + parameters["include_events"] = list(parameters["include_events"]) + else: + parameters["include_events"] = [] + + start_at, end_at = full_range + + if start_at: + is_5_min_batch_export = (end_at - start_at) == dt.timedelta(seconds=300) + else: + is_5_min_batch_export = False + + if is_5_min_batch_export and not is_backfill: + query_template = SELECT_FROM_EVENTS_VIEW_RECENT + elif str(team_id) in settings.UNCONSTRAINED_TIMESTAMP_TEAM_IDS: + query_template = SELECT_FROM_EVENTS_VIEW_UNBOUNDED + elif is_backfill: + query_template = SELECT_FROM_EVENTS_VIEW_BACKFILL + else: + query_template = SELECT_FROM_EVENTS_VIEW + lookback_days = settings.OVERRIDE_TIMESTAMP_TEAM_IDS.get( + team_id, settings.DEFAULT_TIMESTAMP_LOOKBACK_DAYS + ) + parameters["lookback_days"] = lookback_days + + if "_inserted_at" not in [field["alias"] for field in fields]: + control_fields = [BatchExportField(expression="_inserted_at", alias="_inserted_at")] + else: + control_fields = [] + + query_fields = ",".join(f"{field['expression']} AS {field['alias']}" for field in fields + control_fields) + + query = query_template.substitute(fields=query_fields) + + parameters["team_id"] = team_id + 
+        extra_query_parameters = parameters.pop("extra_query_parameters", {}) or {}
+        parameters = {**parameters, **extra_query_parameters}
+
+        self._task = asyncio.create_task(
+            self.produce_batch_export_record_batches_from_range(
+                query=query, full_range=full_range, done_ranges=done_ranges, queue=queue, query_parameters=parameters
+            ),
+            name="record_batch_producer",
+        )
+
+        return self.task
+
+    async def produce_batch_export_record_batches_from_range(
+        self,
+        query: str,
+        full_range: tuple[dt.datetime | None, dt.datetime],
+        done_ranges: collections.abc.Sequence[tuple[dt.datetime, dt.datetime]],
+        queue: RecordBatchQueue,
+        query_parameters: dict[str, typing.Any],
+    ):
+        for interval_start, interval_end in generate_query_ranges(full_range, done_ranges):
+            if interval_start is not None:
+                query_parameters["interval_start"] = interval_start.strftime("%Y-%m-%d %H:%M:%S.%f")
+            query_parameters["interval_end"] = interval_end.strftime("%Y-%m-%d %H:%M:%S.%f")
+            query_id = uuid.uuid4()
+
+            await self.clickhouse_client.aproduce_query_as_arrow_record_batches(
+                query, queue=queue, query_parameters=query_parameters, query_id=str(query_id)
+            )
+
+
+def generate_query_ranges(
+    remaining_range: tuple[dt.datetime | None, dt.datetime],
+    done_ranges: collections.abc.Sequence[tuple[dt.datetime, dt.datetime]],
+) -> typing.Iterator[tuple[dt.datetime | None, dt.datetime]]:
+    """Yield ranges of dates that need to be queried.
+
+    There are essentially 3 scenarios we are expecting:
+    1. The batch export just started, so we expect `done_ranges` to be an empty
+       list, and thus should return the `remaining_range`.
+    2. The batch export crashed mid-execution, so we have some `done_ranges` that
+       do not completely add up to the full range. In this case we need to yield
+       ranges in between all the done ones.
+    3. The batch export crashed right after we finished, so we have a full list of
+       `done_ranges` adding up to the `remaining_range`. In this case we should not
+       yield anything.
+
+    Case 1 is fairly trivial and we can simply return `remaining_range` if we get
+    an empty `done_ranges`.
+
+    Case 2 is more complicated and we can expect that the ranges produced by this
+    function will lead to duplicate events selected, as our batch export query is
+    inclusive in the lower bound. Since multiple rows may have the same
+    `inserted_at` we cannot simply skip an `inserted_at` value, as there may be a
+    row that hasn't been exported yet with the same `inserted_at` as a row that
+    has been exported. So this function will return ranges with `inserted_at`
+    values that were already exported for at least one event. Ideally, this is
+    *only* one event, but we can never be certain.
+    """
+    if len(done_ranges) == 0:
+        yield remaining_range
+        return
+
+    epoch = dt.datetime.fromtimestamp(0, tz=dt.UTC)
+    list_done_ranges: list[tuple[dt.datetime, dt.datetime]] = list(done_ranges)
+
+    list_done_ranges.sort(key=operator.itemgetter(0))
+
+    while True:
+        try:
+            next_range: tuple[dt.datetime | None, dt.datetime] = list_done_ranges.pop(0)
+        except IndexError:
+            if remaining_range[0] != remaining_range[1]:
+                # If they were equal it would mean we have finished.
+                yield remaining_range
+
+            return
+        else:
+            candidate_end_at = next_range[0] if next_range[0] is not None else epoch
+
+        candidate_start_at = remaining_range[0]
+        remaining_range = (next_range[1], remaining_range[1])
+
+        if candidate_start_at is not None and candidate_start_at >= candidate_end_at:
+            # We have landed within a done range.
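+            # Descriptive note (added for clarity): there is nothing new to export before this
+            # done range, since the remaining range already starts at or past it. Skip it and
+            # carry on from the end of the done range (remaining_range was advanced above).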
+            continue
+
+        if candidate_start_at is None and candidate_end_at == epoch:
+            # We have landed within the first done range of a backfill.
+            continue
+
+        yield (candidate_start_at, candidate_end_at)
diff --git a/posthog/temporal/batch_exports/sql.py b/posthog/temporal/batch_exports/sql.py
new file mode 100644
index 0000000000000..7cb3922268ead
--- /dev/null
+++ b/posthog/temporal/batch_exports/sql.py
@@ -0,0 +1,173 @@
+from string import Template
+
+SELECT_FROM_PERSONS_VIEW = """
+SELECT
+    persons.team_id AS team_id,
+    persons.distinct_id AS distinct_id,
+    persons.person_id AS person_id,
+    persons.properties AS properties,
+    persons.person_distinct_id_version AS person_distinct_id_version,
+    persons.person_version AS person_version,
+    persons._inserted_at AS _inserted_at
+FROM
+    persons_batch_export(
+        team_id={team_id},
+        interval_start={interval_start},
+        interval_end={interval_end}
+    ) AS persons
+FORMAT ArrowStream
+SETTINGS
+    max_bytes_before_external_group_by=50000000000,
+    max_bytes_before_external_sort=50000000000,
+    optimize_aggregation_in_order=1
+"""
+
+# This is an updated version of the view that we will use going forward.
+# We will migrate each batch export destination over one at a time to mitigate
+# risk, and once this is done we can clean this up.
+SELECT_FROM_PERSONS_VIEW_NEW = """
+SELECT
+    persons.team_id AS team_id,
+    persons.distinct_id AS distinct_id,
+    persons.person_id AS person_id,
+    persons.properties AS properties,
+    persons.person_distinct_id_version AS person_distinct_id_version,
+    persons.person_version AS person_version,
+    persons.created_at AS created_at,
+    persons._inserted_at AS _inserted_at
+FROM
+    persons_batch_export(
+        team_id={team_id},
+        interval_start={interval_start},
+        interval_end={interval_end}
+    ) AS persons
+FORMAT ArrowStream
+SETTINGS
+    max_bytes_before_external_group_by=50000000000,
+    max_bytes_before_external_sort=50000000000,
+    optimize_aggregation_in_order=1
+"""
+
+SELECT_FROM_PERSONS_VIEW_BACKFILL = """
+SELECT
+    persons.team_id AS team_id,
+    persons.distinct_id AS distinct_id,
+    persons.person_id AS person_id,
+    persons.properties AS properties,
+    persons.person_distinct_id_version AS person_distinct_id_version,
+    persons.person_version AS person_version,
+    persons._inserted_at AS _inserted_at
+FROM
+    persons_batch_export_backfill(
+        team_id={team_id},
+        interval_end={interval_end}
+    ) AS persons
+FORMAT ArrowStream
+SETTINGS
+    max_bytes_before_external_group_by=50000000000,
+    max_bytes_before_external_sort=50000000000,
+    optimize_aggregation_in_order=1
+"""
+
+# This is an updated version of the view that we will use going forward.
+# We will migrate each batch export destination over one at a time to mitigate
+# risk, and once this is done we can clean this up.
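+# The only difference from SELECT_FROM_PERSONS_VIEW_BACKFILL above is the extra created_at column.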
+SELECT_FROM_PERSONS_VIEW_BACKFILL_NEW = """ +SELECT + persons.team_id AS team_id, + persons.distinct_id AS distinct_id, + persons.person_id AS person_id, + persons.properties AS properties, + persons.person_distinct_id_version AS person_distinct_id_version, + persons.person_version AS person_version, + persons.created_at AS created_at, + persons._inserted_at AS _inserted_at +FROM + persons_batch_export_backfill( + team_id={team_id}, + interval_end={interval_end} + ) AS persons +FORMAT ArrowStream +SETTINGS + max_bytes_before_external_group_by=50000000000, + max_bytes_before_external_sort=50000000000, + optimize_aggregation_in_order=1 +""" + +SELECT_FROM_EVENTS_VIEW = Template( + """ +SELECT + $fields +FROM + events_batch_export( + team_id={team_id}, + lookback_days={lookback_days}, + interval_start={interval_start}, + interval_end={interval_end}, + include_events={include_events}::Array(String), + exclude_events={exclude_events}::Array(String) + ) AS events +FORMAT ArrowStream +SETTINGS + -- This is half of configured MAX_MEMORY_USAGE for batch exports. + max_bytes_before_external_sort=50000000000 +""" +) + +SELECT_FROM_EVENTS_VIEW_RECENT = Template( + """ +SELECT + $fields +FROM + events_batch_export_recent( + team_id={team_id}, + interval_start={interval_start}, + interval_end={interval_end}, + include_events={include_events}::Array(String), + exclude_events={exclude_events}::Array(String) + ) AS events +FORMAT ArrowStream +SETTINGS + -- This is half of configured MAX_MEMORY_USAGE for batch exports. + max_bytes_before_external_sort=50000000000, + max_replica_delay_for_distributed_queries=1 +""" +) + +SELECT_FROM_EVENTS_VIEW_UNBOUNDED = Template( + """ +SELECT + $fields +FROM + events_batch_export_unbounded( + team_id={team_id}, + interval_start={interval_start}, + interval_end={interval_end}, + include_events={include_events}::Array(String), + exclude_events={exclude_events}::Array(String) + ) AS events +FORMAT ArrowStream +SETTINGS + -- This is half of configured MAX_MEMORY_USAGE for batch exports. + max_bytes_before_external_sort=50000000000 +""" +) + +SELECT_FROM_EVENTS_VIEW_BACKFILL = Template( + """ +SELECT + $fields +FROM + events_batch_export_backfill( + team_id={team_id}, + interval_start={interval_start}, + interval_end={interval_end}, + include_events={include_events}::Array(String), + exclude_events={exclude_events}::Array(String) + ) AS events +FORMAT ArrowStream +SETTINGS + -- This is half of configured MAX_MEMORY_USAGE for batch exports. 
+ max_bytes_before_external_sort=50000000000 +""" +) diff --git a/posthog/temporal/batch_exports/temporary_file.py b/posthog/temporal/batch_exports/temporary_file.py index 54beae9f9b1d5..c6a30ebc93a1b 100644 --- a/posthog/temporal/batch_exports/temporary_file.py +++ b/posthog/temporal/batch_exports/temporary_file.py @@ -6,6 +6,7 @@ import contextlib import csv import datetime as dt +import enum import gzip import json import tempfile @@ -466,6 +467,48 @@ async def flush(self, is_last: bool = False) -> None: self.end_at_since_last_flush = None +class WriterFormat(enum.StrEnum): + JSONL = enum.auto() + PARQUET = enum.auto() + CSV = enum.auto() + + @staticmethod + def from_str(format_str: str, destination: str): + match format_str.upper(): + case "JSONL" | "JSONLINES": + return WriterFormat.JSONL + case "PARQUET": + return WriterFormat.PARQUET + case "CSV": + return WriterFormat.CSV + case _: + raise UnsupportedFileFormatError(format_str, destination) + + +def get_batch_export_writer(writer_format: WriterFormat, flush_callable: FlushCallable, max_bytes: int, **kwargs): + match writer_format: + case WriterFormat.CSV: + return CSVBatchExportWriter( + max_bytes=max_bytes, + flush_callable=flush_callable, + **kwargs, + ) + + case WriterFormat.JSONL: + return JSONLBatchExportWriter( + max_bytes=max_bytes, + flush_callable=flush_callable, + **kwargs, + ) + + case WriterFormat.PARQUET: + return ParquetBatchExportWriter( + max_bytes=max_bytes, + flush_callable=flush_callable, + **kwargs, + ) + + class JSONLBatchExportWriter(BatchExportWriter): """A `BatchExportWriter` for JSONLines format. @@ -478,6 +521,7 @@ def __init__( self, max_bytes: int, flush_callable: FlushCallable, + schema: pa.Schema | None = None, compression: None | str = None, default: typing.Callable = str, ): @@ -549,6 +593,7 @@ def __init__( max_bytes: int, flush_callable: FlushCallable, field_names: collections.abc.Sequence[str], + schema: pa.Schema | None = None, extras_action: typing.Literal["raise", "ignore"] = "ignore", delimiter: str = ",", quote_char: str = '"', diff --git a/posthog/temporal/batch_exports/utils.py b/posthog/temporal/batch_exports/utils.py index c54e983795838..d9bbda0657ef1 100644 --- a/posthog/temporal/batch_exports/utils.py +++ b/posthog/temporal/batch_exports/utils.py @@ -191,7 +191,7 @@ def __arrow_ext_scalar_class__(self): def cast_record_batch_json_columns( record_batch: pa.RecordBatch, - json_columns: collections.abc.Sequence = ("properties", "person_properties", "set", "set_once"), + json_columns: collections.abc.Sequence[str] = ("properties", "person_properties", "set", "set_once"), ) -> pa.RecordBatch: """Cast json_columns in record_batch to JsonType. 
@@ -215,6 +215,27 @@ def cast_record_batch_json_columns( ) +def cast_record_batch_schema_json_columns( + schema: pa.Schema, + json_columns: collections.abc.Sequence[str] = ("properties", "person_properties", "set", "set_once"), +): + column_names = set(schema.names) + intersection = column_names & set(json_columns) + new_fields = [] + + for field in schema: + if field.name not in intersection or not pa.types.is_string(field.type): + new_fields.append(field) + continue + + casted_field = field.with_type(JsonType()) + new_fields.append(casted_field) + + new_schema = pa.schema(new_fields) + + return new_schema + + _Result = typing.TypeVar("_Result") FutureLike = ( asyncio.Future[_Result] | collections.abc.Coroutine[None, typing.Any, _Result] | collections.abc.Awaitable[_Result] diff --git a/posthog/temporal/common/worker.py b/posthog/temporal/common/worker.py index 2e7118c7934ae..2c78aae3f4a8c 100644 --- a/posthog/temporal/common/worker.py +++ b/posthog/temporal/common/worker.py @@ -24,7 +24,7 @@ async def start_worker( max_concurrent_workflow_tasks=None, max_concurrent_activities=None, ): - runtime = Runtime(telemetry=TelemetryConfig(metrics=PrometheusConfig(bind_address="0.0.0.0:%d" % metrics_port))) + runtime = Runtime(telemetry=TelemetryConfig(metrics=PrometheusConfig(bind_address=f"0.0.0.0:{metrics_port:d}"))) client = await connect( host, port, diff --git a/posthog/temporal/data_modeling/run_workflow.py b/posthog/temporal/data_modeling/run_workflow.py index f6f35bb9a67b3..eab0782b6c4fd 100644 --- a/posthog/temporal/data_modeling/run_workflow.py +++ b/posthog/temporal/data_modeling/run_workflow.py @@ -23,7 +23,7 @@ from django.conf import settings from dlt.common.libs.deltalake import get_delta_tables -from posthog.hogql.constants import HogQLGlobalSettings +from posthog.hogql.constants import HogQLGlobalSettings, LimitContext from posthog.hogql.database.database import create_hogql_database from posthog.hogql.query import execute_hogql_query from posthog.models import Team @@ -347,7 +347,9 @@ def hogql_table(query: str, team: Team, table_name: str, table_columns: dlt_typi async def get_hogql_rows(): settings = HogQLGlobalSettings(max_execution_time=60 * 10) # 10 mins, same as the /query endpoint async workers - response = await asyncio.to_thread(execute_hogql_query, query, team, settings=settings) + response = await asyncio.to_thread( + execute_hogql_query, query, team, settings=settings, limit_context=LimitContext.SAVED_QUERY + ) if not response.columns: raise EmptyHogQLResponseColumnsError() diff --git a/posthog/temporal/tests/batch_exports/test_batch_exports.py b/posthog/temporal/tests/batch_exports/test_batch_exports.py index b8236af8322c9..1303cdb178399 100644 --- a/posthog/temporal/tests/batch_exports/test_batch_exports.py +++ b/posthog/temporal/tests/batch_exports/test_batch_exports.py @@ -4,14 +4,12 @@ import operator from random import randint -import pyarrow as pa import pytest from django.test import override_settings from posthog.batch_exports.service import BatchExportModel from posthog.temporal.batch_exports.batch_exports import ( RecordBatchProducerError, - RecordBatchQueue, TaskNotDoneError, generate_query_ranges, get_data_interval, @@ -743,57 +741,6 @@ async def test_start_produce_batch_export_record_batches_handles_duplicates(clic assert_records_match_events(records, events) -async def test_record_batch_queue_tracks_bytes(): - """Test `RecordBatchQueue` tracks bytes from `RecordBatch`.""" - records = [{"test": 1}, {"test": 2}, {"test": 3}] - record_batch = 
pa.RecordBatch.from_pylist(records) - - queue = RecordBatchQueue() - - await queue.put(record_batch) - assert record_batch.get_total_buffer_size() == queue.qsize() - - item = await queue.get() - - assert item == record_batch - assert queue.qsize() == 0 - - -async def test_record_batch_queue_raises_queue_full(): - """Test `QueueFull` is raised when we put too many bytes.""" - records = [{"test": 1}, {"test": 2}, {"test": 3}] - record_batch = pa.RecordBatch.from_pylist(records) - record_batch_size = record_batch.get_total_buffer_size() - - queue = RecordBatchQueue(max_size_bytes=record_batch_size) - - await queue.put(record_batch) - assert record_batch.get_total_buffer_size() == queue.qsize() - - with pytest.raises(asyncio.QueueFull): - queue.put_nowait(record_batch) - - item = await queue.get() - - assert item == record_batch - assert queue.qsize() == 0 - - -async def test_record_batch_queue_sets_schema(): - """Test `RecordBatchQueue` sets a schema from first `RecordBatch`.""" - records = [{"test": 1}, {"test": 2}, {"test": 3}] - record_batch = pa.RecordBatch.from_pylist(records) - - queue = RecordBatchQueue() - - await queue.put(record_batch) - - assert queue._schema_set.is_set() - - schema = await queue.get_schema() - assert schema == record_batch.schema - - async def test_raise_on_produce_task_failure_raises_record_batch_producer_error(): """Test a `RecordBatchProducerError` is raised with the right cause.""" cause = ValueError("Oh no!") diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py index d5d8d26b40373..979869c4b31d7 100644 --- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py +++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py @@ -28,10 +28,10 @@ ) from posthog.temporal.batch_exports.s3_batch_export import ( FILE_FORMAT_EXTENSIONS, - S3HeartbeatDetails, IntermittentUploadPartTimeoutError, S3BatchExportInputs, S3BatchExportWorkflow, + S3HeartbeatDetails, S3InsertInputs, S3MultiPartUpload, get_s3_key, @@ -1589,7 +1589,7 @@ def __init__(self, *args, **kwargs): assert run.records_completed is None assert ( run.latest_error - == "IntermittentUploadPartTimeoutError: An intermittent `RequestTimeout` was raised while attempting to upload part 1" + == "RecordBatchConsumerRetryableExceptionGroup: At least one unhandled retryable errors in a RecordBatch consumer TaskGroup (1 sub-exception)" ) run = runs[1] diff --git a/posthog/temporal/tests/batch_exports/test_spmc.py b/posthog/temporal/tests/batch_exports/test_spmc.py new file mode 100644 index 0000000000000..7fd41dc15de28 --- /dev/null +++ b/posthog/temporal/tests/batch_exports/test_spmc.py @@ -0,0 +1,131 @@ +import asyncio +import datetime as dt +import random + +import pyarrow as pa +import pytest + +from posthog.temporal.batch_exports.spmc import Producer, RecordBatchQueue +from posthog.temporal.tests.utils.events import generate_test_events_in_clickhouse + +pytestmark = [pytest.mark.asyncio, pytest.mark.django_db] + + +async def test_record_batch_queue_tracks_bytes(): + """Test `RecordBatchQueue` tracks bytes from `RecordBatch`.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + + queue = RecordBatchQueue() + + await queue.put(record_batch) + assert record_batch.get_total_buffer_size() == queue.qsize() + + item = await queue.get() + + assert item == record_batch + assert queue.qsize() == 0 + + +async def 
test_record_batch_queue_raises_queue_full(): + """Test `QueueFull` is raised when we put too many bytes.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + record_batch_size = record_batch.get_total_buffer_size() + + queue = RecordBatchQueue(max_size_bytes=record_batch_size) + + await queue.put(record_batch) + assert record_batch.get_total_buffer_size() == queue.qsize() + + with pytest.raises(asyncio.QueueFull): + queue.put_nowait(record_batch) + + item = await queue.get() + + assert item == record_batch + assert queue.qsize() == 0 + + +async def test_record_batch_queue_sets_schema(): + """Test `RecordBatchQueue` sets a schema from first `RecordBatch`.""" + records = [{"test": 1}, {"test": 2}, {"test": 3}] + record_batch = pa.RecordBatch.from_pylist(records) + + queue = RecordBatchQueue() + + await queue.put(record_batch) + + assert queue._schema_set.is_set() + + schema = await queue.get_schema() + assert schema == record_batch.schema + + +async def get_record_batch_from_queue(queue, produce_task): + while not queue.empty() or not produce_task.done(): + try: + record_batch = queue.get_nowait() + except asyncio.QueueEmpty: + if produce_task.done(): + break + else: + await asyncio.sleep(0.1) + continue + + return record_batch + return None + + +async def get_all_record_batches_from_queue(queue, produce_task): + records = [] + while not queue.empty() or not produce_task.done(): + record_batch = await get_record_batch_from_queue(queue, produce_task) + if record_batch is None: + break + + for record in record_batch.to_pylist(): + records.append(record) + return records + + +async def test_record_batch_producer_uses_extra_query_parameters(clickhouse_client): + """Test RecordBatch Producer uses a HogQL value.""" + team_id = random.randint(1, 1000000) + data_interval_end = dt.datetime.fromisoformat("2023-04-25T14:31:00.000000+00:00") + data_interval_start = dt.datetime.fromisoformat("2023-04-25T14:30:00.000000+00:00") + + (events, _, _) = await generate_test_events_in_clickhouse( + client=clickhouse_client, + team_id=team_id, + start_time=data_interval_start, + end_time=data_interval_end, + count=10, + count_outside_range=0, + count_other_team=0, + duplicate=False, + properties={"$browser": "Chrome", "$os": "Mac OS X", "custom": 3}, + ) + + queue = RecordBatchQueue() + producer = Producer(clickhouse_client=clickhouse_client) + producer_task = producer.start( + queue=queue, + team_id=team_id, + is_backfill=False, + model_name="events", + full_range=(data_interval_start, data_interval_end), + done_ranges=[], + fields=[ + {"expression": "JSONExtractInt(properties, %(hogql_val_0)s)", "alias": "custom_prop"}, + ], + extra_query_parameters={"hogql_val_0": "custom"}, + ) + + records = await get_all_record_batches_from_queue(queue, producer_task) + + for expected, record in zip(events, records): + if expected["properties"] is None: + raise ValueError("Empty properties") + + assert record["custom_prop"] == expected["properties"]["custom"] diff --git a/posthog/test/base.py b/posthog/test/base.py index 701d4a509395f..43dcc0e130964 100644 --- a/posthog/test/base.py +++ b/posthog/test/base.py @@ -263,6 +263,15 @@ def clean_varying_query_parts(query, replace_all_numbers): "SELECT distinct_id, 1 as value", query, ) + + # rbac has some varying IDs we can replace + # e.g. 
AND "ee_accesscontrol"."resource_id" = '450' + query = re.sub( + r"\"resource_id\" = '\d+'", + "\"resource_id\" = '99999'", + query, + ) + return query @@ -309,7 +318,7 @@ def __eq__(self, other): return self.lowest <= other <= self.highest def __repr__(self): - return "[%d..%d]" % (self.lowest, self.highest) + return f"[{self.lowest:d}..{self.highest:d}]" class ErrorResponsesMixin: diff --git a/posthog/urls.py b/posthog/urls.py index 80a47cd42a5d8..210287c149392 100644 --- a/posthog/urls.py +++ b/posthog/urls.py @@ -210,6 +210,7 @@ def opt_slash_path(route: str, view: Callable, name: Optional[str] = None) -> UR sharing.SharingViewerPageViewSet.as_view({"get": "retrieve"}), ), path("site_app////", site_app.get_site_app), + path("site_function///", site_app.get_site_function), re_path(r"^demo.*", login_required(demo_route)), # ingestion # NOTE: When adding paths here that should be public make sure to update ALWAYS_ALLOWED_ENDPOINTS in middleware.py diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py index c9bbd5eefb946..02e3c68a77422 100644 --- a/posthog/warehouse/api/external_data_source.py +++ b/posthog/warehouse/api/external_data_source.py @@ -571,8 +571,7 @@ def _handle_sql_source(self, request: Request, *args: Any, **kwargs: Any) -> tup ssh_tunnel_auth_type_passphrase = ssh_tunnel_auth_type_obj.get("passphrase", None) ssh_tunnel_auth_type_private_key = ssh_tunnel_auth_type_obj.get("private_key", None) - use_ssl_obj = payload.get("use_ssl", {}) - using_ssl_str = use_ssl_obj.get("enabled", "1") + using_ssl_str = payload.get("use_ssl", "1") using_ssl = str_to_bool(using_ssl_str) if not self._validate_database_host(host, self.team_id, using_ssh_tunnel): @@ -934,8 +933,7 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any): ssh_tunnel_auth_type_passphrase = ssh_tunnel_auth_type_obj.get("passphrase", None) ssh_tunnel_auth_type_private_key = ssh_tunnel_auth_type_obj.get("private_key", None) - use_ssl_obj = request.data.get("use_ssl", {}) - using_ssl_str = use_ssl_obj.get("enabled", "1") + using_ssl_str = request.data.get("use_ssl", "1") using_ssl = str_to_bool(using_ssl_str) ssh_tunnel = SSHTunnel( diff --git a/posthog/warehouse/api/saved_query.py b/posthog/warehouse/api/saved_query.py index 2d8ef156aa6b0..e9f3f65eeb191 100644 --- a/posthog/warehouse/api/saved_query.py +++ b/posthog/warehouse/api/saved_query.py @@ -19,7 +19,13 @@ from posthog.hogql.printer import print_ast from posthog.temporal.common.client import sync_connect from posthog.temporal.data_modeling.run_workflow import RunWorkflowInputs, Selector -from posthog.warehouse.models import DataWarehouseJoin, DataWarehouseModelPath, DataWarehouseSavedQuery +from posthog.warehouse.models import ( + CLICKHOUSE_HOGQL_MAPPING, + DataWarehouseJoin, + DataWarehouseModelPath, + DataWarehouseSavedQuery, + clean_type, +) import uuid @@ -47,7 +53,11 @@ class Meta: def get_columns(self, view: DataWarehouseSavedQuery) -> list[SerializedField]: team_id = self.context["team_id"] - context = HogQLContext(team_id=team_id, database=create_hogql_database(team_id=team_id)) + database = self.context.get("database", None) + if not database: + database = create_hogql_database(team_id=team_id) + + context = HogQLContext(team_id=team_id, database=database) fields = serialize_fields(view.hogql_definition().fields, context, view.name, table_type="external") return [ @@ -70,7 +80,20 @@ def create(self, validated_data): view = DataWarehouseSavedQuery(**validated_data) # The columns will 
be inferred from the query try: - view.columns = view.get_columns() + client_types = self.context["request"].data.get("types", []) + if len(client_types) == 0: + view.columns = view.get_columns() + else: + columns = { + str(item[0]): { + "hogql": CLICKHOUSE_HOGQL_MAPPING[clean_type(str(item[1]))].__name__, + "clickhouse": item[1], + "valid": True, + } + for item in client_types + } + view.columns = columns + view.external_tables = view.s3_tables except Exception as err: raise serializers.ValidationError(str(err)) @@ -151,6 +174,11 @@ class DataWarehouseSavedQueryViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewS search_fields = ["name"] ordering = "-created_at" + def get_serializer_context(self) -> dict[str, Any]: + context = super().get_serializer_context() + context["database"] = create_hogql_database(team_id=self.team_id) + return context + def safely_get_queryset(self, queryset): return queryset.prefetch_related("created_by").exclude(deleted=True).order_by(self.ordering) diff --git a/posthog/warehouse/api/test/test_saved_query.py b/posthog/warehouse/api/test/test_saved_query.py index 6bc7f0c07ac23..6790887374873 100644 --- a/posthog/warehouse/api/test/test_saved_query.py +++ b/posthog/warehouse/api/test/test_saved_query.py @@ -1,7 +1,8 @@ +from unittest.mock import patch import uuid from posthog.test.base import APIBaseTest -from posthog.warehouse.models import DataWarehouseModelPath +from posthog.warehouse.models import DataWarehouseModelPath, DataWarehouseSavedQuery class TestSavedQuery(APIBaseTest): @@ -34,6 +35,36 @@ def test_create(self): ], ) + def test_create_with_types(self): + with patch.object(DataWarehouseSavedQuery, "get_columns") as mock_get_columns: + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_saved_queries/", + { + "name": "event_view", + "query": { + "kind": "HogQLQuery", + "query": "select event as event from events LIMIT 100", + }, + "types": [["event", "Nullable(String)"]], + }, + ) + assert response.status_code == 201 + saved_query = response.json() + assert saved_query["name"] == "event_view" + assert saved_query["columns"] == [ + { + "key": "event", + "name": "event", + "type": "string", + "schema_valid": True, + "fields": None, + "table": None, + "chain": None, + } + ] + + mock_get_columns.assert_not_called() + def test_create_name_overlap_error(self): response = self.client.post( f"/api/projects/{self.team.id}/warehouse_saved_queries/", diff --git a/posthog/warehouse/api/test/test_view_link.py b/posthog/warehouse/api/test/test_view_link.py index d8de45348b370..4bf4f697ef4a8 100644 --- a/posthog/warehouse/api/test/test_view_link.py +++ b/posthog/warehouse/api/test/test_view_link.py @@ -12,9 +12,56 @@ def test_create(self): "source_table_key": "uuid", "joining_table_key": "id", "field_name": "some_field", + "configuration": None, }, ) self.assertEqual(response.status_code, 201, response.content) + view_link = response.json() + self.assertEqual( + view_link, + { + "id": view_link["id"], + "deleted": False, + "created_by": view_link["created_by"], + "created_at": view_link["created_at"], + "source_table_name": "events", + "source_table_key": "uuid", + "joining_table_name": "persons", + "joining_table_key": "id", + "field_name": "some_field", + "configuration": None, + }, + ) + + def test_create_with_configuration(self): + response = self.client.post( + f"/api/projects/{self.team.id}/warehouse_view_links/", + { + "source_table_name": "events", + "joining_table_name": "persons", + "source_table_key": "uuid", + "joining_table_key": "id", + 
"field_name": "some_field", + "configuration": {"experiments_optimized": True, "experiments_timestamp_key": "timestamp"}, + }, + ) + self.assertEqual(response.status_code, 201, response.content) + view_link = response.json() + self.assertEqual( + view_link, + { + "id": view_link["id"], + "deleted": False, + "created_by": view_link["created_by"], + "created_at": view_link["created_at"], + "source_table_name": "events", + "source_table_key": "uuid", + "joining_table_name": "persons", + "joining_table_key": "id", + "field_name": "some_field", + "configuration": {"experiments_optimized": True, "experiments_timestamp_key": "timestamp"}, + }, + ) def test_create_key_error(self): response = self.client.post( @@ -55,6 +102,42 @@ def test_create_saved_query_join_key_function(self): ) self.assertEqual(response.status_code, 400, response.content) + def test_update_with_configuration(self): + join = DataWarehouseJoin.objects.create( + team=self.team, + source_table_name="events", + source_table_key="distinct_id", + joining_table_name="persons", + joining_table_key="id", + field_name="some_field", + configuration=None, + ) + join.save() + + response = self.client.patch( + f"/api/projects/{self.team.id}/warehouse_view_links/{join.id}/", + {"configuration": {"experiments_optimized": True, "experiments_timestamp_key": "timestamp"}}, + ) + self.assertEqual(response.status_code, 200, response.content) + view_link = response.json() + self.assertEqual( + view_link, + { + "id": view_link["id"], + "deleted": False, + "created_by": view_link["created_by"], + "created_at": view_link["created_at"], + "source_table_name": "events", + "source_table_key": "distinct_id", + "joining_table_name": "persons", + "joining_table_key": "id", + "field_name": "some_field", + "configuration": {"experiments_optimized": True, "experiments_timestamp_key": "timestamp"}, + }, + ) + join.refresh_from_db() + self.assertEqual(join.configuration, {"experiments_optimized": True, "experiments_timestamp_key": "timestamp"}) + def test_delete(self): response = self.client.post( f"/api/projects/{self.team.id}/warehouse_view_links/", diff --git a/posthog/warehouse/api/view_link.py b/posthog/warehouse/api/view_link.py index e3d701bb64b99..a249dbf9d3859 100644 --- a/posthog/warehouse/api/view_link.py +++ b/posthog/warehouse/api/view_link.py @@ -25,6 +25,7 @@ class Meta: "joining_table_name", "joining_table_key", "field_name", + "configuration", ] read_only_fields = ["id", "created_by", "created_at"] diff --git a/posthog/warehouse/models/join.py b/posthog/warehouse/models/join.py index febbf0182f1ca..b24d6916e93c9 100644 --- a/posthog/warehouse/models/join.py +++ b/posthog/warehouse/models/join.py @@ -3,6 +3,7 @@ from datetime import datetime from django.db import models +from posthog.hogql import ast from posthog.hogql.ast import SelectQuery from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import LazyJoinToAdd @@ -40,6 +41,7 @@ class DataWarehouseJoin(CreatedMetaFields, UUIDModel, DeletedMetaFields): joining_table_name = models.CharField(max_length=400) joining_table_key = models.CharField(max_length=400) field_name = models.CharField(max_length=400) + configuration = models.JSONField(default=dict, null=True) def soft_delete(self): self.deleted = True @@ -94,3 +96,88 @@ def _join_function( return join_expr return _join_function + + def join_function_for_experiments(self): + def _join_function_for_experiments( + join_to_add: LazyJoinToAdd, + context: HogQLContext, + node: SelectQuery, + ): + if 
self.joining_table_name != "events": + raise ResolutionError("experiments_optimized is only supported for events table") + + if not self.configuration.get("experiments_optimized"): + raise ResolutionError("experiments_optimized is not enabled for this join") + + timestamp_key = self.configuration.get("experiments_timestamp_key") + if not timestamp_key: + raise ResolutionError("experiments_timestamp_key is not set for this join") + + return ast.JoinExpr( + table=ast.SelectQuery( + select=[ + ast.Alias( + alias=name, + expr=ast.Field(chain=["events", *(chain if isinstance(chain, list | tuple) else [chain])]), + ) + for name, chain in { + **join_to_add.fields_accessed, + "timestamp": ["timestamp"], + "distinct_id": ["distinct_id"], + "properties": ["properties"], + }.items() + ], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), + ), + # ASOF JOIN finds the most recent matching event that occurred at or before each data warehouse timestamp. + # + # Why this matters: + # When a user performs an action (recorded in data warehouse), we want to know which + # experiment variant they were assigned at that moment. The most recent $feature_flag_called + # event before their action represents their active variant assignment. + # + # Example: + # Data Warehouse: timestamp=2024-01-03 12:00, distinct_id=user1 + # Events: + # 2024-01-02: (user1, variant='control') <- This event will be joined + # 2024-01-03: (user1, variant='test') <- Ignored (occurs after data warehouse timestamp) + # + # This ensures we capture the correct causal relationship: which experiment variant + # was the user assigned to when they performed the action? + join_type="ASOF LEFT JOIN", + alias=join_to_add.to_table, + constraint=ast.JoinConstraint( + expr=ast.And( + exprs=[ + ast.CompareOperation( + left=ast.Field(chain=[join_to_add.to_table, "event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="$feature_flag_called"), + ), + ast.CompareOperation( + left=ast.Field( + chain=[ + join_to_add.from_table, + self.source_table_key, + ] + ), + op=ast.CompareOperationOp.Eq, + right=ast.Field(chain=[join_to_add.to_table, "distinct_id"]), + ), + ast.CompareOperation( + left=ast.Field( + chain=[ + join_to_add.from_table, + timestamp_key, + ] + ), + op=ast.CompareOperationOp.GtEq, + right=ast.Field(chain=[join_to_add.to_table, "timestamp"]), + ), + ] + ), + constraint_type="ON", + ), + ) + + return _join_function_for_experiments diff --git a/requirements-dev.in b/requirements-dev.in index 4bb51e1f7bfb4..3a8488c259d5a 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -11,7 +11,7 @@ -c requirements.txt -ruff~=0.6.1 +ruff~=0.8.1 mypy~=1.11.1 mypy-baseline~=0.7.0 mypy-extensions==1.0.0 @@ -23,6 +23,7 @@ Faker==17.5.0 fakeredis[lua]==2.23.3 freezegun==1.2.2 inline-snapshot==0.12.* +multidict==6.0.5 # Not used by us directly, but code won't run on Ubuntu 24.04 unless we resolve this to 6.0.5 packaging==24.1 black~=23.9.1 boto3-stubs[s3] diff --git a/requirements-dev.txt b/requirements-dev.txt index 7586c83bfdf8d..41a109d8cb97f 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -163,6 +163,10 @@ googleapis-common-protos==1.60.0 # via # -c requirements.txt # opentelemetry-exporter-otlp-proto-grpc +greenlet==3.1.1 + # via + # -c requirements.txt + # sqlalchemy grpcio==1.63.2 # via # -c requirements.txt @@ -275,9 +279,10 @@ marshmallow==3.23.1 # via dataclasses-json mdurl==0.1.2 # via markdown-it-py -multidict==6.0.2 +multidict==6.0.5 # via # -c requirements.txt + # -r requirements-dev.in # aiohttp # 
yarl
 multiprocess==0.70.16
@@ -527,7 +532,7 @@ ruamel-yaml==0.18.6
     # via prance
 ruamel-yaml-clib==0.2.8
     # via ruamel-yaml
-ruff==0.6.1
+ruff==0.8.1
     # via -r requirements-dev.in
 sentry-sdk==1.44.1
     # via
diff --git a/requirements.in b/requirements.in
index 74ee8ae363d0e..a592b6fb9b14b 100644
--- a/requirements.in
+++ b/requirements.in
@@ -52,6 +52,7 @@ langfuse==2.52.1
 langgraph==0.2.34
 langsmith==0.1.132
 lzstring==1.0.4
+multidict==6.0.5 # Not used by us directly, but code won't run on Ubuntu 24.04 unless we resolve this to 6.0.5
 natsort==8.4.0
 nanoid==2.0.0
 numpy==1.23.3
diff --git a/requirements.txt b/requirements.txt
index 2d48fceb43a46..1168a500c13d7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -271,6 +271,8 @@ googleapis-common-protos==1.60.0
     # via
     #   google-api-core
     #   grpcio-status
+greenlet==3.1.1
+    # via sqlalchemy
 grpcio==1.63.2
     # via
     #   -r requirements.in
@@ -393,8 +395,9 @@ more-itertools==9.0.0
     #   simple-salesforce
 msgpack==1.1.0
     # via langgraph-checkpoint
-multidict==6.0.2
+multidict==6.0.5
     # via
+    #   -r requirements.in
     #   aiohttp
     #   yarl
 nanoid==2.0.0
diff --git a/rust/cymbal/src/types/mod.rs b/rust/cymbal/src/types/mod.rs
index 01850217127f0..9e2354494e339 100644
--- a/rust/cymbal/src/types/mod.rs
+++ b/rust/cymbal/src/types/mod.rs
@@ -49,6 +49,11 @@ pub struct Exception {
 pub struct RawErrProps {
     #[serde(rename = "$exception_list")]
     pub exception_list: Vec<Exception>,
+    #[serde(
+        rename = "$exception_fingerprint",
+        skip_serializing_if = "Option::is_none"
+    )]
+    pub fingerprint: Option<String>, // Clients can send us fingerprints, which we'll use if present
     #[serde(flatten)]
     // A catch-all for all the properties we don't "care" about, so when we send back to kafka we don't lose any info
     pub other: HashMap<String, Value>,
@@ -57,6 +62,7 @@ pub struct RawErrProps {
 pub struct FingerprintedErrProps {
     pub exception_list: Vec<Exception>,
     pub fingerprint: String,
+    pub proposed_fingerprint: String, // We suggest a fingerprint, based on hashes, but let users override client-side
     pub other: HashMap<String, Value>,
 }
@@ -67,6 +73,8 @@ pub struct OutputErrProps {
     pub exception_list: Vec<Exception>,
     #[serde(rename = "$exception_fingerprint")]
     pub fingerprint: String,
+    #[serde(rename = "$exception_proposed_fingerprint")]
+    pub proposed_fingerprint: String,
     #[serde(rename = "$exception_issue_id")]
     pub issue_id: Uuid,
     #[serde(flatten)]
@@ -119,7 +127,8 @@ impl RawErrProps {
     pub fn to_fingerprinted(self, fingerprint: String) -> FingerprintedErrProps {
         FingerprintedErrProps {
             exception_list: self.exception_list,
-            fingerprint,
+            fingerprint: self.fingerprint.unwrap_or(fingerprint.clone()),
+            proposed_fingerprint: fingerprint,
             other: self.other,
         }
     }
@@ -131,6 +140,7 @@ impl FingerprintedErrProps {
             exception_list: self.exception_list,
             fingerprint: self.fingerprint,
             issue_id,
+            proposed_fingerprint: self.proposed_fingerprint,
             other: self.other,
         }
     }
diff --git a/rust/cymbal/tests/static/python_err_props.json b/rust/cymbal/tests/static/python_err_props.json
index 1a110b81964b1..df4caf67e728e 100644
--- a/rust/cymbal/tests/static/python_err_props.json
+++ b/rust/cymbal/tests/static/python_err_props.json
@@ -733,6 +733,5 @@
     "$ip": "185.140.230.43",
     "$lib_version__minor": 6,
     "$lib": "posthog-python",
-    "$lib_version__major": 3,
-    "$exception_fingerprint": ["ConnectionRefusedError", "[Errno 111] Connection refused", "_new_conn"]
+    "$lib_version__major": 3
 }
diff --git a/rust/feature-flags/src/flags/flag_matching.rs b/rust/feature-flags/src/flags/flag_matching.rs
index 73666f02f74eb..04b9dc67c7939 100644
--- a/rust/feature-flags/src/flags/flag_matching.rs
+++
b/rust/feature-flags/src/flags/flag_matching.rs @@ -30,6 +30,7 @@ use tokio::time::{sleep, timeout}; use tracing::{error, info}; pub type TeamId = i32; +pub type ProjectId = i32; pub type PersonId = i32; pub type GroupTypeIndex = i32; pub type PostgresReader = Arc; @@ -74,7 +75,7 @@ pub struct GroupTypeMapping { /// These mappings are ingested via the plugin server. #[derive(Clone)] pub struct GroupTypeMappingCache { - team_id: TeamId, + project_id: ProjectId, failed_to_fetch_flags: bool, group_types_to_indexes: HashMap, group_indexes_to_types: HashMap, @@ -82,9 +83,9 @@ pub struct GroupTypeMappingCache { } impl GroupTypeMappingCache { - pub fn new(team_id: TeamId, reader: PostgresReader) -> Self { + pub fn new(project_id: ProjectId, reader: PostgresReader) -> Self { GroupTypeMappingCache { - team_id, + project_id, failed_to_fetch_flags: false, group_types_to_indexes: HashMap::new(), group_indexes_to_types: HashMap::new(), @@ -103,9 +104,8 @@ impl GroupTypeMappingCache { return Ok(self.group_types_to_indexes.clone()); } - let team_id = self.team_id; let mapping = match self - .fetch_group_type_mapping(self.reader.clone(), team_id) + .fetch_group_type_mapping(self.reader.clone(), self.project_id) .await { Ok(mapping) if !mapping.is_empty() => mapping, @@ -163,18 +163,18 @@ impl GroupTypeMappingCache { async fn fetch_group_type_mapping( &mut self, reader: PostgresReader, - team_id: TeamId, + project_id: ProjectId, ) -> Result, FlagError> { let mut conn = reader.as_ref().get_connection().await?; let query = r#" SELECT group_type, group_type_index FROM posthog_grouptypemapping - WHERE team_id = $1 + WHERE project_id = $1 "#; let rows = sqlx::query_as::<_, GroupTypeMapping>(query) - .bind(team_id) + .bind(project_id) .fetch_all(&mut *conn) .await?; diff --git a/rust/feature-flags/src/team/team_models.rs b/rust/feature-flags/src/team/team_models.rs index 2922382561852..a063dec53b012 100644 --- a/rust/feature-flags/src/team/team_models.rs +++ b/rust/feature-flags/src/team/team_models.rs @@ -9,6 +9,12 @@ pub struct Team { pub id: i32, pub name: String, pub api_token: String, + /// Project ID. This field is not present in Redis cache before Dec 2025, but this is not a problem at all, + /// because we know all Teams created before Dec 2025 have `project_id` = `id`. To handle this case gracefully, + /// we use 0 as a fallback value in deserialization here, and handle this in `Team::from_redis`. + /// Thanks to this default-base approach, we avoid invalidating the whole cache needlessly. + #[serde(default)] + pub project_id: i64, // TODO: the following fields are used for the `/decide` response, // but they're not used for flags and they don't live in redis. 
// At some point I'll need to differentiate between teams in Redis and teams diff --git a/rust/feature-flags/src/team/team_operations.rs b/rust/feature-flags/src/team/team_operations.rs index 4f9b706153cef..690722462fc31 100644 --- a/rust/feature-flags/src/team/team_operations.rs +++ b/rust/feature-flags/src/team/team_operations.rs @@ -21,10 +21,14 @@ impl Team { .await?; // TODO: Consider an LRU cache for teams as well, with small TTL to skip redis/pg lookups - let team: Team = serde_json::from_str(&serialized_team).map_err(|e| { + let mut team: Team = serde_json::from_str(&serialized_team).map_err(|e| { tracing::error!("failed to parse data to team: {}", e); FlagError::RedisDataParsingError })?; + if team.project_id == 0 { + // If `project_id` is 0, this means the payload is from before December 2025, which we correct for here + team.project_id = team.id as i64; + } Ok(team) } @@ -59,7 +63,7 @@ impl Team { ) -> Result { let mut conn = client.get_connection().await?; - let query = "SELECT id, name, api_token FROM posthog_team WHERE api_token = $1"; + let query = "SELECT id, name, api_token, project_id FROM posthog_team WHERE api_token = $1"; let row = sqlx::query_as::<_, Team>(query) .bind(&token) .fetch_one(&mut *conn) @@ -95,6 +99,7 @@ mod tests { .unwrap(); assert_eq!(team_from_redis.api_token, target_token); assert_eq!(team_from_redis.id, team.id); + assert_eq!(team_from_redis.project_id, team.project_id); } #[tokio::test] @@ -120,10 +125,11 @@ mod tests { #[tokio::test] async fn test_corrupted_data_in_redis_is_handled() { // TODO: Extend this test with fallback to pg - let id = rand::thread_rng().gen_range(0..10_000_000); + let id = rand::thread_rng().gen_range(1..10_000_000); let token = random_string("phc_", 12); let team = Team { id, + project_id: i64::from(id) - 1, name: "team".to_string(), api_token: token, }; @@ -153,6 +159,32 @@ mod tests { }; } + #[tokio::test] + async fn test_fetch_team_from_before_project_id_from_redis() { + let client = setup_redis_client(None); + let target_token = "phc_123456789012".to_string(); + // A payload form before December 2025, it's missing `project_id` + let serialized_team = format!( + "{{\"id\":343,\"name\":\"team\",\"api_token\":\"{}\"}}", + target_token + ); + client + .set( + format!("{}{}", TEAM_TOKEN_CACHE_PREFIX, target_token), + serialized_team, + ) + .await + .expect("Failed to write data to redis"); + + let team_from_redis = Team::from_redis(client.clone(), target_token.clone()) + .await + .expect("Failed to fetch team from redis"); + + assert_eq!(team_from_redis.api_token, target_token); + assert_eq!(team_from_redis.id, 343); + assert_eq!(team_from_redis.project_id, 343); // Same as `id` + } + #[tokio::test] async fn test_fetch_team_from_pg() { let client = setup_pg_reader_client(None).await; diff --git a/rust/feature-flags/src/utils/test_utils.rs b/rust/feature-flags/src/utils/test_utils.rs index ad108d0802301..1c6cf2b1caafe 100644 --- a/rust/feature-flags/src/utils/test_utils.rs +++ b/rust/feature-flags/src/utils/test_utils.rs @@ -29,10 +29,11 @@ pub fn random_string(prefix: &str, length: usize) -> String { pub async fn insert_new_team_in_redis( client: Arc, ) -> Result { - let id = rand::thread_rng().gen_range(0..10_000_000); + let id = rand::thread_rng().gen_range(1..10_000_000); let token = random_string("phc_", 12); let team = Team { id, + project_id: i64::from(id) - 1, name: "team".to_string(), api_token: token, }; @@ -206,6 +207,7 @@ pub async fn insert_new_team_in_pg( let token = random_string("phc_", 12); let team = Team { 
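+        // Note (added for clarity): unlike `insert_new_team_in_redis` above, the Postgres helper keeps `project_id` equal to `id`.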
id, + project_id: id as i64, name: "team".to_string(), api_token: token, }; @@ -228,8 +230,8 @@ pub async fn insert_new_team_in_pg( let res = sqlx::query( r#"INSERT INTO posthog_team (id, uuid, organization_id, project_id, api_token, name, created_at, updated_at, app_urls, anonymize_ips, completed_snippet_onboarding, ingested_event, session_recording_opt_in, is_demo, access_control, test_account_filters, timezone, data_attributes, plugins_opt_in, opt_out_capture, event_names, event_names_with_usage, event_properties, event_properties_with_usage, event_properties_numerical) VALUES - ($1, $5, $2::uuid, $1, $3, $4, '2024-06-17 14:40:51.332036+00:00', '2024-06-17', '{}', false, false, false, false, false, false, '{}', 'UTC', '["data-attr"]', false, false, '[]', '[]', '[]', '[]', '[]')"# - ).bind(team.id).bind(ORG_ID).bind(&team.api_token).bind(&team.name).bind(uuid).execute(&mut *conn).await?; + ($1, $2, $3::uuid, $4, $5, $6, '2024-06-17 14:40:51.332036+00:00', '2024-06-17', '{}', false, false, false, false, false, false, '{}', 'UTC', '["data-attr"]', false, false, '[]', '[]', '[]', '[]', '[]')"# + ).bind(team.id).bind(uuid).bind(ORG_ID).bind(team.project_id).bind(&team.api_token).bind(&team.name).execute(&mut *conn).await?; assert_eq!(res.rows_affected(), 1); // Insert group type mappings @@ -246,11 +248,12 @@ pub async fn insert_new_team_in_pg( r#"INSERT INTO posthog_grouptypemapping (group_type, group_type_index, name_singular, name_plural, team_id, project_id) VALUES - ($1, $2, NULL, NULL, $3, $3)"#, + ($1, $2, NULL, NULL, $3, $4)"#, ) .bind(group_type) .bind(group_type_index) .bind(team.id) + .bind(team.project_id) .execute(&mut *conn) .await?; assert_eq!(res.rows_affected(), 1);