diff --git a/.github/workflows/browserslist-update.yml b/.github/workflows/browserslist-update.yml
new file mode 100644
index 0000000000000..9d07dba8fc762
--- /dev/null
+++ b/.github/workflows/browserslist-update.yml
@@ -0,0 +1,40 @@
+name: Update Browserslist database
+
+on:
+ schedule:
+ - cron: '0 12 * * MON'
+ workflow_dispatch:
+
+permissions:
+ contents: write
+ pull-requests: write
+
+jobs:
+ update-browserslist-database:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: 0
+
+ - name: Configure git
+ run: |
+ git config --global user.email "action@github.com"
+ git config --global user.name "Browserslist Update Action"
+
+ - name: Install pnpm
+ uses: pnpm/action-setup@v4
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+ with:
+ node-version: 18.12.1
+
+ - name: Update Browserslist database and create PR if applies
+ uses: c2corg/browserslist-update-action@v2
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ commit_message: 'build: update Browserslist db'
+ title: 'build: update Browserslist db'
+ labels: 'dependencies, automerge'
diff --git a/ee/benchmarks/helpers.py b/ee/benchmarks/helpers.py
index 285a1dc97ee9f..8535e6adef47d 100644
--- a/ee/benchmarks/helpers.py
+++ b/ee/benchmarks/helpers.py
@@ -14,7 +14,7 @@
django.setup()
-from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns # noqa: E402
+from ee.clickhouse.materialized_columns.columns import get_enabled_materialized_columns # noqa: E402
from posthog import client # noqa: E402
from posthog.clickhouse.query_tagging import reset_query_tags, tag_queries # noqa: E402
from posthog.models.utils import UUIDT # noqa: E402
diff --git a/ee/clickhouse/materialized_columns/analyze.py b/ee/clickhouse/materialized_columns/analyze.py
index 43a1e83256912..bfae76ef2432c 100644
--- a/ee/clickhouse/materialized_columns/analyze.py
+++ b/ee/clickhouse/materialized_columns/analyze.py
@@ -171,6 +171,7 @@ def materialize_properties_task(
backfill_period_days: int = MATERIALIZE_COLUMNS_BACKFILL_PERIOD_DAYS,
dry_run: bool = False,
team_id_to_analyze: Optional[int] = None,
+ is_nullable: bool = False,
) -> None:
"""
Creates materialized columns for event and person properties based off of slow queries
@@ -203,7 +204,7 @@ def materialize_properties_task(
logger.info(f"Materializing column. table={table}, property_name={property_name}")
if not dry_run:
- materialize(table, property_name, table_column=table_column)
+ materialize(table, property_name, table_column=table_column, is_nullable=is_nullable)
properties[table].append((property_name, table_column))
if backfill_period_days > 0 and not dry_run:
diff --git a/ee/clickhouse/materialized_columns/columns.py b/ee/clickhouse/materialized_columns/columns.py
index c9624bf96bacd..caa5cae1401c0 100644
--- a/ee/clickhouse/materialized_columns/columns.py
+++ b/ee/clickhouse/materialized_columns/columns.py
@@ -1,33 +1,35 @@
from __future__ import annotations
+import logging
import re
from collections.abc import Callable, Iterator
from copy import copy
from dataclasses import dataclass, replace
from datetime import timedelta
-from typing import Any, Literal, NamedTuple, TypeVar, cast
+from typing import Any, Literal, TypeVar, cast
from clickhouse_driver import Client
from django.utils.timezone import now
+from posthog.cache_utils import cache_for
from posthog.clickhouse.client.connection import default_client
from posthog.clickhouse.cluster import ClickhouseCluster, ConnectionInfo, FuturesMap, HostInfo
from posthog.clickhouse.kafka_engine import trim_quotes_expr
from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns
from posthog.client import sync_execute
from posthog.models.event.sql import EVENTS_DATA_TABLE
-from posthog.models.instance_setting import get_instance_setting
from posthog.models.person.sql import PERSONS_TABLE
from posthog.models.property import PropertyName, TableColumn, TableWithProperties
from posthog.models.utils import generate_random_short_suffix
from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_PER_TEAM_SETTINGS, TEST
+
+logger = logging.getLogger(__name__)
+
T = TypeVar("T")
DEFAULT_TABLE_COLUMN: Literal["properties"] = "properties"
-TRIM_AND_EXTRACT_PROPERTY = trim_quotes_expr("JSONExtractRaw({table_column}, %(property)s)")
-
SHORT_TABLE_COLUMN_NAME = {
"properties": "p",
"group_properties": "gp",
@@ -40,15 +42,36 @@
}
-class MaterializedColumn(NamedTuple):
+@dataclass
+class MaterializedColumn:
name: ColumnName
details: MaterializedColumnDetails
+ is_nullable: bool
+
+ @property
+ def type(self) -> str:
+ if self.is_nullable:
+ return "Nullable(String)"
+ else:
+ return "String"
+
+ def get_expression_and_parameters(self) -> tuple[str, dict[str, Any]]:
+ if self.is_nullable:
+ return (
+ f"JSONExtract({self.details.table_column}, %(property_name)s, %(property_type)s)",
+ {"property_name": self.details.property_name, "property_type": self.type},
+ )
+ else:
+ return (
+ trim_quotes_expr(f"JSONExtractRaw({self.details.table_column}, %(property)s)"),
+ {"property": self.details.property_name},
+ )
@staticmethod
def get_all(table: TablesWithMaterializedColumns) -> Iterator[MaterializedColumn]:
rows = sync_execute(
"""
- SELECT name, comment
+ SELECT name, comment, type like 'Nullable(%%)' as is_nullable
FROM system.columns
WHERE database = %(database)s
AND table = %(table)s
@@ -58,8 +81,8 @@ def get_all(table: TablesWithMaterializedColumns) -> Iterator[MaterializedColumn
{"database": CLICKHOUSE_DATABASE, "table": table},
)
- for name, comment in rows:
- yield MaterializedColumn(name, MaterializedColumnDetails.from_column_comment(comment))
+ for name, comment, is_nullable in rows:
+ yield MaterializedColumn(name, MaterializedColumnDetails.from_column_comment(comment), is_nullable)
@staticmethod
def get(table: TablesWithMaterializedColumns, column_name: ColumnName) -> MaterializedColumn:
@@ -111,22 +134,24 @@ def from_column_comment(cls, comment: str) -> MaterializedColumnDetails:
def get_materialized_columns(
table: TablesWithMaterializedColumns,
- exclude_disabled_columns: bool = False,
-) -> dict[tuple[PropertyName, TableColumn], ColumnName]:
- if not get_instance_setting("MATERIALIZED_COLUMNS_ENABLED"):
- return {}
-
+) -> dict[tuple[PropertyName, TableColumn], MaterializedColumn]:
return {
- (column.details.property_name, column.details.table_column): column.name
+ (column.details.property_name, column.details.table_column): column
for column in MaterializedColumn.get_all(table)
- if not (exclude_disabled_columns and column.details.is_disabled)
}
+@cache_for(timedelta(minutes=15))
+def get_enabled_materialized_columns(
+ table: TablesWithMaterializedColumns,
+) -> dict[tuple[PropertyName, TableColumn], MaterializedColumn]:
+ return {k: column for k, column in get_materialized_columns(table).items() if not column.details.is_disabled}
+
+
def get_cluster() -> ClickhouseCluster:
extra_hosts = []
for host_config in map(copy, CLICKHOUSE_PER_TEAM_SETTINGS.values()):
- extra_hosts.append(ConnectionInfo(host_config.pop("host"), host_config.pop("port", None)))
+ extra_hosts.append(ConnectionInfo(host_config.pop("host")))
assert len(host_config) == 0, f"unexpected values: {host_config!r}"
return ClickhouseCluster(default_client(), extra_hosts=extra_hosts)
@@ -161,6 +186,10 @@ def map_data_nodes(self, cluster: ClickhouseCluster, fn: Callable[[Client], T])
}
+def get_minmax_index_name(column: str) -> str:
+ return f"minmax_{column}"
+
+
@dataclass
class CreateColumnOnDataNodesTask:
table: str
@@ -169,20 +198,17 @@ class CreateColumnOnDataNodesTask:
add_column_comment: bool
def execute(self, client: Client) -> None:
+ expression, parameters = self.column.get_expression_and_parameters()
actions = [
- f"""
- ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR
- MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=self.column.details.table_column)}
- """,
+ f"ADD COLUMN IF NOT EXISTS {self.column.name} {self.column.type} MATERIALIZED {expression}",
]
- parameters = {"property": self.column.details.property_name}
if self.add_column_comment:
actions.append(f"COMMENT COLUMN {self.column.name} %(comment)s")
parameters["comment"] = self.column.details.as_column_comment()
if self.create_minmax_index:
- index_name = f"minmax_{self.column.name}"
+ index_name = get_minmax_index_name(self.column.name)
actions.append(f"ADD INDEX IF NOT EXISTS {index_name} {self.column.name} TYPE minmax GRANULARITY 1")
client.execute(
@@ -201,7 +227,7 @@ def execute(self, client: Client) -> None:
client.execute(
f"""
ALTER TABLE {self.table}
- ADD COLUMN IF NOT EXISTS {self.column.name} VARCHAR,
+ ADD COLUMN IF NOT EXISTS {self.column.name} {self.column.type},
COMMENT COLUMN {self.column.name} %(comment)s
""",
{"comment": self.column.details.as_column_comment()},
@@ -215,6 +241,7 @@ def materialize(
column_name: ColumnName | None = None,
table_column: TableColumn = DEFAULT_TABLE_COLUMN,
create_minmax_index=not TEST,
+ is_nullable: bool = False,
) -> ColumnName | None:
if (property, table_column) in get_materialized_columns(table):
if TEST:
@@ -235,6 +262,7 @@ def materialize(
property_name=property,
is_disabled=False,
),
+ is_nullable=is_nullable,
)
table_info.map_data_nodes(
@@ -275,20 +303,42 @@ def update_column_is_disabled(table: TablesWithMaterializedColumns, column_name:
cluster = get_cluster()
table_info = tables[table]
+ column = MaterializedColumn.get(table, column_name)
+
cluster.map_all_hosts(
UpdateColumnCommentTask(
table_info.read_table,
- MaterializedColumn(
- name=column_name,
- details=replace(
- MaterializedColumn.get(table, column_name).details,
- is_disabled=is_disabled,
- ),
- ),
+ replace(column, details=replace(column.details, is_disabled=is_disabled)),
).execute
).result()
+def check_index_exists(client: Client, table: str, index: str) -> bool:
+ [(count,)] = client.execute(
+ """
+ SELECT count()
+ FROM system.data_skipping_indices
+ WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s
+ """,
+ {"table": table, "name": index},
+ )
+ assert 1 >= count >= 0
+ return bool(count)
+
+
+def check_column_exists(client: Client, table: str, column: str) -> bool:
+ [(count,)] = client.execute(
+ """
+ SELECT count()
+ FROM system.columns
+ WHERE database = currentDatabase() AND table = %(table)s AND name = %(name)s
+ """,
+ {"table": table, "name": column},
+ )
+ assert 1 >= count >= 0
+ return bool(count)
+
+
@dataclass
class DropColumnTask:
table: str
@@ -296,19 +346,28 @@ class DropColumnTask:
try_drop_index: bool
def execute(self, client: Client) -> None:
- # XXX: copy/pasted from create task
+ actions = []
+
if self.try_drop_index:
- index_name = f"minmax_{self.column_name}"
+ index_name = get_minmax_index_name(self.column_name)
+ drop_index_action = f"DROP INDEX IF EXISTS {index_name}"
+ if check_index_exists(client, self.table, index_name):
+ actions.append(drop_index_action)
+ else:
+ logger.info("Skipping %r, nothing to do...", drop_index_action)
+
+ drop_column_action = f"DROP COLUMN IF EXISTS {self.column_name}"
+ if check_column_exists(client, self.table, self.column_name):
+ actions.append(drop_column_action)
+ else:
+ logger.info("Skipping %r, nothing to do...", drop_column_action)
+
+ if actions:
client.execute(
- f"ALTER TABLE {self.table} DROP INDEX IF EXISTS {index_name}",
+ f"ALTER TABLE {self.table} " + ", ".join(actions),
settings={"alter_sync": 2 if TEST else 1},
)
- client.execute(
- f"ALTER TABLE {self.table} DROP COLUMN IF EXISTS {self.column_name}",
- settings={"alter_sync": 2 if TEST else 1},
- )
-
def drop_column(table: TablesWithMaterializedColumns, column_name: str) -> None:
cluster = get_cluster()
@@ -345,12 +404,13 @@ def execute(self, client: Client) -> None:
# Note that for this to work all inserts should list columns explicitly
# Improve this if https://github.com/ClickHouse/ClickHouse/issues/27730 ever gets resolved
for column in self.columns:
+ expression, parameters = column.get_expression_and_parameters()
client.execute(
f"""
ALTER TABLE {self.table}
- MODIFY COLUMN {column.name} VARCHAR DEFAULT {TRIM_AND_EXTRACT_PROPERTY.format(table_column=column.details.table_column)}
+ MODIFY COLUMN {column.name} {column.type} DEFAULT {expression}
""",
- {"property": column.details.property_name},
+ parameters,
settings=self.test_settings,
)
@@ -420,10 +480,10 @@ def _materialized_column_name(
prefix += f"{SHORT_TABLE_COLUMN_NAME[table_column]}_"
property_str = re.sub("[^0-9a-zA-Z$]", "_", property)
- existing_materialized_columns = set(get_materialized_columns(table).values())
+ existing_materialized_column_names = {column.name for column in get_materialized_columns(table).values()}
suffix = ""
- while f"{prefix}{property_str}{suffix}" in existing_materialized_columns:
+ while f"{prefix}{property_str}{suffix}" in existing_materialized_column_names:
suffix = "_" + generate_random_short_suffix()
return f"{prefix}{property_str}{suffix}"
diff --git a/ee/clickhouse/materialized_columns/test/test_analyze.py b/ee/clickhouse/materialized_columns/test/test_analyze.py
index 6fdb0fb05cb0e..3b225ab670f92 100644
--- a/ee/clickhouse/materialized_columns/test/test_analyze.py
+++ b/ee/clickhouse/materialized_columns/test/test_analyze.py
@@ -49,9 +49,9 @@ def test_mat_columns(self, patch_backfill, patch_materialize):
materialize_properties_task()
patch_materialize.assert_has_calls(
[
- call("events", "materialize_me", table_column="properties"),
- call("events", "materialize_me2", table_column="properties"),
- call("events", "materialize_person_prop", table_column="person_properties"),
- call("events", "materialize_me3", table_column="properties"),
+ call("events", "materialize_me", table_column="properties", is_nullable=False),
+ call("events", "materialize_me2", table_column="properties", is_nullable=False),
+ call("events", "materialize_person_prop", table_column="person_properties", is_nullable=False),
+ call("events", "materialize_me3", table_column="properties", is_nullable=False),
]
)
diff --git a/ee/clickhouse/materialized_columns/test/test_columns.py b/ee/clickhouse/materialized_columns/test/test_columns.py
index 4cbbef0c4a416..993c1a7aa2f65 100644
--- a/ee/clickhouse/materialized_columns/test/test_columns.py
+++ b/ee/clickhouse/materialized_columns/test/test_columns.py
@@ -1,5 +1,6 @@
from datetime import timedelta
from time import sleep
+from collections.abc import Iterable
from unittest import TestCase
from unittest.mock import patch
@@ -10,11 +11,13 @@
MaterializedColumnDetails,
backfill_materialized_columns,
drop_column,
+ get_enabled_materialized_columns,
get_materialized_columns,
materialize,
update_column_is_disabled,
)
-from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns
+from ee.tasks.materialized_columns import mark_all_materialized
+from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns
from posthog.client import sync_execute
from posthog.conftest import create_clickhouse_tables
from posthog.constants import GROUP_TYPES_LIMIT
@@ -142,11 +145,11 @@ def test_materialized_column_naming(self, mock_choice):
("$foO();ääsqlinject", "properties"): "mat_$foO_____sqlinject_YYYY",
("$foO_____sqlinject", "properties"): "mat_$foO_____sqlinject_ZZZZ",
},
- get_materialized_columns("events"),
+ {k: column.name for k, column in get_materialized_columns("events").items()},
)
self.assertEqual(
- get_materialized_columns("person"),
+ {k: column.name for k, column in get_materialized_columns("person").items()},
{("SoMePrOp", "properties"): "pmat_SoMePrOp"},
)
@@ -241,20 +244,26 @@ def test_backfilling_data(self):
def test_column_types(self):
materialize("events", "myprop", create_minmax_index=True)
+ materialize("events", "myprop_nullable", create_minmax_index=True, is_nullable=True)
- expr = "replaceRegexpAll(JSONExtractRaw(properties, 'myprop'), '^\"|\"$', '')"
- self.assertEqual(("MATERIALIZED", expr), self._get_column_types("mat_myprop"))
+ expr_nonnullable = "replaceRegexpAll(JSONExtractRaw(properties, 'myprop'), '^\"|\"$', '')"
+ expr_nullable = "JSONExtract(properties, 'myprop_nullable', 'Nullable(String)')"
+ self.assertEqual(("String", "MATERIALIZED", expr_nonnullable), self._get_column_types("mat_myprop"))
+ self.assertEqual(
+ ("Nullable(String)", "MATERIALIZED", expr_nullable), self._get_column_types("mat_myprop_nullable")
+ )
- backfill_materialized_columns("events", [("myprop", "properties")], timedelta(days=50))
- self.assertEqual(("DEFAULT", expr), self._get_column_types("mat_myprop"))
+ backfill_materialized_columns(
+ "events", [("myprop", "properties"), ("myprop_nullable", "properties")], timedelta(days=50)
+ )
+ self.assertEqual(("String", "DEFAULT", expr_nonnullable), self._get_column_types("mat_myprop"))
+ self.assertEqual(("Nullable(String)", "DEFAULT", expr_nullable), self._get_column_types("mat_myprop_nullable"))
- try:
- from ee.tasks.materialized_columns import mark_all_materialized
- except ImportError:
- pass
- else:
- mark_all_materialized()
- self.assertEqual(("MATERIALIZED", expr), self._get_column_types("mat_myprop"))
+ mark_all_materialized()
+ self.assertEqual(("String", "MATERIALIZED", expr_nonnullable), self._get_column_types("mat_myprop"))
+ self.assertEqual(
+ ("Nullable(String)", "MATERIALIZED", expr_nullable), self._get_column_types("mat_myprop_nullable")
+ )
def _count_materialized_rows(self, column):
return sync_execute(
@@ -284,7 +293,7 @@ def _get_count_of_mutations_running(self) -> int:
def _get_column_types(self, column: str):
return sync_execute(
"""
- SELECT default_kind, default_expression
+ SELECT type, default_kind, default_expression
FROM system.columns
WHERE database = %(database)s AND table = %(table)s AND name = %(column)s
""",
@@ -306,33 +315,100 @@ def test_lifecycle(self):
# ensure it exists everywhere
key = (property, source_column)
- assert get_materialized_columns(table)[key] == destination_column
+ assert get_materialized_columns(table)[key].name == destination_column
assert MaterializedColumn.get(table, destination_column) == MaterializedColumn(
destination_column,
MaterializedColumnDetails(source_column, property, is_disabled=False),
+ is_nullable=False,
)
# disable it and ensure updates apply as needed
update_column_is_disabled(table, destination_column, is_disabled=True)
- assert get_materialized_columns(table)[key] == destination_column
- assert key not in get_materialized_columns(table, exclude_disabled_columns=True)
+ assert get_materialized_columns(table)[key].name == destination_column
assert MaterializedColumn.get(table, destination_column) == MaterializedColumn(
destination_column,
MaterializedColumnDetails(source_column, property, is_disabled=True),
+ is_nullable=False,
)
# re-enable it and ensure updates apply as needed
update_column_is_disabled(table, destination_column, is_disabled=False)
- assert get_materialized_columns(table, exclude_disabled_columns=False)[key] == destination_column
- assert get_materialized_columns(table, exclude_disabled_columns=True)[key] == destination_column
+ assert get_materialized_columns(table)[key].name == destination_column
assert MaterializedColumn.get(table, destination_column) == MaterializedColumn(
destination_column,
MaterializedColumnDetails(source_column, property, is_disabled=False),
+ is_nullable=False,
)
# drop it and ensure updates apply as needed
drop_column(table, destination_column)
- assert key not in get_materialized_columns(table, exclude_disabled_columns=False)
- assert key not in get_materialized_columns(table, exclude_disabled_columns=True)
+ assert key not in get_materialized_columns(table)
with self.assertRaises(ValueError):
MaterializedColumn.get(table, destination_column)
+
+ def _get_latest_mutation_id(self, table: str) -> str:
+ [(mutation_id,)] = sync_execute(
+ """
+ SELECT max(mutation_id)
+ FROM system.mutations
+ WHERE
+ database = currentDatabase()
+ AND table = %(table)s
+ """,
+ {"table": table},
+ )
+ return mutation_id
+
+ def _get_mutations_since_id(self, table: str, id: str) -> Iterable[str]:
+ return [
+ command
+ for (command,) in sync_execute(
+ """
+ SELECT command
+ FROM system.mutations
+ WHERE
+ database = currentDatabase()
+ AND table = %(table)s
+ AND mutation_id > %(mutation_id)s
+ ORDER BY mutation_id
+ """,
+ {"table": table, "mutation_id": id},
+ )
+ ]
+
+ def test_drop_optimized_no_index(self):
+ table: TablesWithMaterializedColumns = (
+ "person" # little bit easier than events because no shard awareness needed
+ )
+ property: PropertyName = "myprop"
+ source_column: TableColumn = "properties"
+
+ destination_column = materialize(table, property, table_column=source_column, create_minmax_index=False)
+ assert destination_column is not None
+
+ latest_mutation_id_before_drop = self._get_latest_mutation_id(table)
+
+ drop_column(table, destination_column)
+
+ mutations_ran = self._get_mutations_since_id(table, latest_mutation_id_before_drop)
+ assert not any("DROP INDEX" in mutation for mutation in mutations_ran)
+
+ def test_drop_optimized_no_column(self):
+ table: TablesWithMaterializedColumns = (
+ "person" # little bit easier than events because no shard awareness needed
+ )
+ property: PropertyName = "myprop"
+ source_column: TableColumn = "properties"
+
+ # create the materialized column
+ destination_column = materialize(table, property, table_column=source_column, create_minmax_index=False)
+ assert destination_column is not None
+
+ sync_execute(f"ALTER TABLE {table} DROP COLUMN {destination_column}", settings={"alter_sync": 1})
+
+ latest_mutation_id_before_drop = self._get_latest_mutation_id(table)
+
+ drop_column(table, destination_column)
+
+ mutations_ran = self._get_mutations_since_id(table, latest_mutation_id_before_drop)
+ assert not any("DROP COLUMN" in mutation for mutation in mutations_ran)
diff --git a/ee/clickhouse/models/test/test_cohort.py b/ee/clickhouse/models/test/test_cohort.py
index 8af41154c48a5..1600584169a28 100644
--- a/ee/clickhouse/models/test/test_cohort.py
+++ b/ee/clickhouse/models/test/test_cohort.py
@@ -1,4 +1,5 @@
from datetime import datetime, timedelta
+from typing import Optional
from django.utils import timezone
from freezegun import freeze_time
@@ -8,12 +9,13 @@
from posthog.models.action import Action
from posthog.models.cohort import Cohort
from posthog.models.cohort.sql import GET_COHORTPEOPLE_BY_COHORT_ID
-from posthog.models.cohort.util import format_filter_query, get_person_ids_by_cohort_id
+from posthog.models.cohort.util import format_filter_query
from posthog.models.filters import Filter
from posthog.models.organization import Organization
from posthog.models.person import Person
from posthog.models.property.util import parse_prop_grouped_clauses
from posthog.models.team import Team
+from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
from posthog.queries.util import PersonPropertiesMode
from posthog.schema import PersonsOnEventsMode
from posthog.test.base import (
@@ -25,6 +27,7 @@
snapshot_clickhouse_insert_cohortpeople_queries,
snapshot_clickhouse_queries,
)
+from posthog.models.person.sql import GET_LATEST_PERSON_SQL, GET_PERSON_IDS_BY_FILTER
def _create_action(**kwargs):
@@ -34,12 +37,44 @@ def _create_action(**kwargs):
return action
+def get_person_ids_by_cohort_id(
+ team_id: int,
+ cohort_id: int,
+ limit: Optional[int] = None,
+ offset: Optional[int] = None,
+):
+ from posthog.models.property.util import parse_prop_grouped_clauses
+
+ filter = Filter(data={"properties": [{"key": "id", "value": cohort_id, "type": "cohort"}]})
+ filter_query, filter_params = parse_prop_grouped_clauses(
+ team_id=team_id,
+ property_group=filter.property_groups,
+ table_name="pdi",
+ hogql_context=filter.hogql_context,
+ )
+
+ results = sync_execute(
+ GET_PERSON_IDS_BY_FILTER.format(
+ person_query=GET_LATEST_PERSON_SQL,
+ distinct_query=filter_query,
+ query="",
+ GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id),
+ offset="OFFSET %(offset)s" if offset else "",
+ limit="ORDER BY _timestamp ASC LIMIT %(limit)s" if limit else "",
+ ),
+ {**filter_params, "team_id": team_id, "offset": offset, "limit": limit},
+ )
+
+ return [str(row[0]) for row in results]
+
+
class TestCohort(ClickhouseTestMixin, BaseTest):
- def _get_cohortpeople(self, cohort: Cohort):
+ def _get_cohortpeople(self, cohort: Cohort, *, team_id: Optional[int] = None):
+ team_id = team_id or cohort.team_id
return sync_execute(
GET_COHORTPEOPLE_BY_COHORT_ID,
{
- "team_id": self.team.pk,
+ "team_id": team_id,
"cohort_id": cohort.pk,
"version": cohort.version,
},
@@ -452,7 +487,7 @@ def test_cohort_get_person_ids_by_cohort_id(self):
name="cohort1",
)
- results = get_person_ids_by_cohort_id(self.team, cohort.id)
+ results = get_person_ids_by_cohort_id(self.team.pk, cohort.id)
self.assertEqual(len(results), 2)
self.assertIn(str(user1.uuid), results)
self.assertIn(str(user3.uuid), results)
@@ -468,7 +503,7 @@ def test_insert_by_distinct_id_or_email(self):
cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True)
cohort.insert_users_by_list(["1", "123"])
cohort = Cohort.objects.get()
- results = get_person_ids_by_cohort_id(self.team, cohort.id)
+ results = get_person_ids_by_cohort_id(self.team.pk, cohort.id)
self.assertEqual(len(results), 2)
self.assertEqual(cohort.is_calculating, False)
@@ -483,12 +518,12 @@ def test_insert_by_distinct_id_or_email(self):
# If we accidentally call calculate_people it shouldn't erase people
cohort.calculate_people_ch(pending_version=0)
- results = get_person_ids_by_cohort_id(self.team, cohort.id)
+ results = get_person_ids_by_cohort_id(self.team.pk, cohort.id)
self.assertEqual(len(results), 3)
# if we add people again, don't increase the number of people in cohort
cohort.insert_users_by_list(["123"])
- results = get_person_ids_by_cohort_id(self.team, cohort.id)
+ results = get_person_ids_by_cohort_id(self.team.pk, cohort.id)
self.assertEqual(len(results), 3)
@snapshot_clickhouse_insert_cohortpeople_queries
@@ -1370,3 +1405,45 @@ def test_cohort_versioning(self):
# Should have p1 in this cohort even if version is different
results = self._get_cohortpeople(cohort1)
self.assertEqual(len(results), 1)
+
+ def test_calculate_people_ch_in_multiteam_project(self):
+ # Create another team in the same project
+ team2 = Team.objects.create(organization=self.organization, project=self.team.project)
+
+ # Create people in team 1
+ _person1_team1 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person1"],
+ properties={"$some_prop": "else"},
+ )
+ person2_team1 = _create_person(
+ team_id=self.team.pk,
+ distinct_ids=["person2"],
+ properties={"$some_prop": "something"},
+ )
+ # Create people in team 2 with same property
+ person1_team2 = _create_person(
+ team_id=team2.pk,
+ distinct_ids=["person1_team2"],
+ properties={"$some_prop": "something"},
+ )
+ _person2_team2 = _create_person(
+ team_id=team2.pk,
+ distinct_ids=["person2_team2"],
+ properties={"$some_prop": "else"},
+ )
+ # Create cohort in team 2 (but same project as team 1)
+ shared_cohort = Cohort.objects.create(
+ team=team2,
+ groups=[{"properties": [{"key": "$some_prop", "value": "something", "type": "person"}]}],
+ name="shared cohort",
+ )
+ # Calculate cohort
+ shared_cohort.calculate_people_ch(pending_version=0)
+
+ # Verify shared_cohort is now calculated for both teams
+ results_team1 = self._get_cohortpeople(shared_cohort, team_id=self.team.pk)
+ results_team2 = self._get_cohortpeople(shared_cohort, team_id=team2.pk)
+
+ self.assertCountEqual([r[0] for r in results_team1], [person2_team1.uuid])
+ self.assertCountEqual([r[0] for r in results_team2], [person1_team2.uuid])
diff --git a/ee/clickhouse/queries/funnels/funnel_correlation.py b/ee/clickhouse/queries/funnels/funnel_correlation.py
index 3e5b69005d689..0b909c84b398e 100644
--- a/ee/clickhouse/queries/funnels/funnel_correlation.py
+++ b/ee/clickhouse/queries/funnels/funnel_correlation.py
@@ -13,7 +13,7 @@
from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer
from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery
-from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns
+from posthog.clickhouse.materialized_columns import get_materialized_column_for_property
from posthog.constants import (
AUTOCAPTURE_EVENT,
TREND_FILTER_TYPE_ACTIONS,
@@ -156,8 +156,6 @@ def properties_to_include(self) -> list[str]:
):
# When dealing with properties, make sure funnel response comes with properties
# so we don't have to join on persons/groups to get these properties again
- mat_event_cols = get_enabled_materialized_columns("events")
-
for property_name in cast(list, self._filter.correlation_property_names):
if self._filter.aggregation_group_type_index is not None:
continue # We don't support group properties on events at this time
@@ -165,10 +163,11 @@ def properties_to_include(self) -> list[str]:
if "$all" == property_name:
return [f"person_properties"]
- possible_mat_col = mat_event_cols.get((property_name, "person_properties"))
-
- if possible_mat_col is not None:
- props_to_include.append(possible_mat_col)
+ possible_mat_col = get_materialized_column_for_property(
+ "events", "person_properties", property_name
+ )
+ if possible_mat_col is not None and not possible_mat_col.is_nullable:
+ props_to_include.append(possible_mat_col.name)
else:
props_to_include.append(f"person_properties")
diff --git a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py
index 4617ffde3c2d5..c6954e15eed9b 100644
--- a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py
+++ b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py
@@ -251,6 +251,7 @@ def test_create_funnel_correlation_cohort(self, _insert_cohort_from_insight_filt
"funnel_correlation_person_entity": "{'id': 'positively_related', 'type': 'events'}",
"funnel_correlation_person_converted": "TrUe",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(cohort_id, params)
diff --git a/ee/clickhouse/views/test/test_clickhouse_path_person.py b/ee/clickhouse/views/test/test_clickhouse_path_person.py
index 48fc8a2475c06..597aa6dffc5e2 100644
--- a/ee/clickhouse/views/test/test_clickhouse_path_person.py
+++ b/ee/clickhouse/views/test/test_clickhouse_path_person.py
@@ -97,6 +97,7 @@ def test_create_paths_cohort(self, _insert_cohort_from_insight_filter):
"date_from": "2021-05-01",
"date_to": "2021-05-10",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(cohort_id, params)
diff --git a/ee/management/commands/materialize_columns.py b/ee/management/commands/materialize_columns.py
index c1ca3b3fd2287..5ddbf55dea2b7 100644
--- a/ee/management/commands/materialize_columns.py
+++ b/ee/management/commands/materialize_columns.py
@@ -1,3 +1,4 @@
+import argparse
import logging
from django.core.management.base import BaseCommand
@@ -69,8 +70,14 @@ def add_arguments(self, parser):
default=MATERIALIZE_COLUMNS_MAX_AT_ONCE,
help="Max number of columns to materialize via single invocation. Same as MATERIALIZE_COLUMNS_MAX_AT_ONCE env variable.",
)
+ parser.add_argument(
+ "--nullable",
+ action=argparse.BooleanOptionalAction,
+ default=True,
+ dest="is_nullable",
+ )
- def handle(self, *args, **options):
+ def handle(self, *, is_nullable: bool, **options):
logger.setLevel(logging.INFO)
if options["dry_run"]:
@@ -90,6 +97,7 @@ def handle(self, *args, **options):
],
backfill_period_days=options["backfill_period"],
dry_run=options["dry_run"],
+ is_nullable=is_nullable,
)
else:
materialize_properties_task(
@@ -99,4 +107,5 @@ def handle(self, *args, **options):
backfill_period_days=options["backfill_period"],
dry_run=options["dry_run"],
team_id_to_analyze=options["analyze_team_id"],
+ is_nullable=is_nullable,
)
diff --git a/ee/tasks/materialized_columns.py b/ee/tasks/materialized_columns.py
index d05cdddc0b0ca..98091c3b1d00a 100644
--- a/ee/tasks/materialized_columns.py
+++ b/ee/tasks/materialized_columns.py
@@ -1,50 +1,49 @@
+from collections.abc import Iterator
+from dataclasses import dataclass
from celery.utils.log import get_task_logger
+from clickhouse_driver import Client
-from ee.clickhouse.materialized_columns.columns import (
- TRIM_AND_EXTRACT_PROPERTY,
- get_materialized_columns,
-)
+from ee.clickhouse.materialized_columns.columns import MaterializedColumn, get_cluster, tables as table_infos
from posthog.client import sync_execute
-from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE
+from posthog.settings import CLICKHOUSE_DATABASE
from posthog.clickhouse.materialized_columns import ColumnName, TablesWithMaterializedColumns
logger = get_task_logger(__name__)
-def mark_all_materialized() -> None:
- if any_ongoing_mutations():
- logger.info("There are running mutations, skipping marking as materialized")
- return
-
- for (
- table,
- property_name,
- table_column,
- column_name,
- ) in get_materialized_columns_with_default_expression():
- updated_table = "sharded_events" if table == "events" else table
-
- # :TRICKY: On cloud, we ON CLUSTER updates to events/sharded_events but not to persons. Why? ¯\_(ツ)_/¯
- execute_on_cluster = f"ON CLUSTER '{CLICKHOUSE_CLUSTER}'" if table == "events" else ""
-
- sync_execute(
- f"""
- ALTER TABLE {updated_table}
- {execute_on_cluster}
- MODIFY COLUMN
- {column_name} VARCHAR MATERIALIZED {TRIM_AND_EXTRACT_PROPERTY.format(table_column=table_column)}
- """,
- {"property": property_name},
+@dataclass
+class MarkMaterializedTask:
+ table: str
+ column: MaterializedColumn
+
+ def execute(self, client: Client) -> None:
+ expression, parameters = self.column.get_expression_and_parameters()
+ client.execute(
+ f"ALTER TABLE {self.table} MODIFY COLUMN {self.column.name} {self.column.type} MATERIALIZED {expression}",
+ parameters,
)
-def get_materialized_columns_with_default_expression():
- tables: list[TablesWithMaterializedColumns] = ["events", "person"]
- for table in tables:
- materialized_columns = get_materialized_columns(table)
- for (property_name, table_column), column_name in materialized_columns.items():
- if is_default_expression(table, column_name):
- yield table, property_name, table_column, column_name
+def mark_all_materialized() -> None:
+ cluster = get_cluster()
+
+ for table_name, column in get_materialized_columns_with_default_expression():
+ table_info = table_infos[table_name]
+ table_info.map_data_nodes(
+ cluster,
+ MarkMaterializedTask(
+ table_info.data_table,
+ column,
+ ).execute,
+ ).result()
+
+
+def get_materialized_columns_with_default_expression() -> Iterator[tuple[str, MaterializedColumn]]:
+ table_names: list[TablesWithMaterializedColumns] = ["events", "person"]
+ for table_name in table_names:
+ for column in MaterializedColumn.get_all(table_name):
+ if is_default_expression(table_name, column.name):
+ yield table_name, column
def any_ongoing_mutations() -> bool:
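`MarkMaterializedTask` follows the same pattern as `CreateColumnOnDataNodesTask` and `DropColumnTask` above: a small dataclass holds the per-task state, and its bound `execute` method is the one-argument callable that the cluster map helpers invoke once per host. A toy sketch of the pattern (`PingTask` is hypothetical; `Client` is `clickhouse_driver.Client` as in the diff):

```python
from dataclasses import dataclass

from clickhouse_driver import Client


@dataclass
class PingTask:
    table: str

    def execute(self, client: Client) -> None:
        # harmless query against the target table, run once per host
        client.execute(f"SELECT 1 FROM {self.table} LIMIT 0")


# usage, with get_cluster() from the columns module above:
#   get_cluster().map_all_hosts(PingTask("events").execute).result()
```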
diff --git a/ee/tasks/test/test_calculate_cohort.py b/ee/tasks/test/test_calculate_cohort.py
index c5264bbe12631..ed0dd3e4290cc 100644
--- a/ee/tasks/test/test_calculate_cohort.py
+++ b/ee/tasks/test/test_calculate_cohort.py
@@ -45,6 +45,7 @@ def test_create_stickiness_cohort(self, _insert_cohort_from_insight_filter):
"stickiness_days": "1",
"label": "$pageview",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(
@@ -118,6 +119,7 @@ def test_create_trends_cohort(self, _insert_cohort_from_insight_filter):
"date_to": "2021-01-01",
"label": "$pageview",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(
cohort_id,
@@ -228,6 +230,7 @@ def test_create_trends_cohort_arg_test(self, _insert_cohort_from_insight_filter)
"interval": "day",
"properties": '[{"key": "$domain", "value": "app.posthog.com", "operator": "icontains", "type": "event"}]',
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(
cohort_id,
@@ -357,6 +360,7 @@ def test_create_funnels_cohort(self, _insert_cohort_from_insight_filter):
"date_to": "2021-01-07",
"funnel_step": "1",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(cohort_id, params)
@@ -445,6 +449,7 @@ def _create_events(data, event="$pageview"):
"entity_order": "0",
"lifecycle_type": "returning",
},
+ self.team.pk,
)
insert_cohort_from_insight_filter(
@@ -507,6 +512,7 @@ def _create_events(data, event="$pageview"):
"entity_order": "0",
"lifecycle_type": "dormant",
},
+ self.team.pk,
)
self.assertEqual(_insert_cohort_from_insight_filter.call_count, 2)
diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png
index 64260538a2c7b..ee27b11bed568 100644
Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png
index 61441f05dddaa..d5450bc6cd3df 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png
index 9a9851e2b50b5..5dae24aa10f4a 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-project-with-replay-features--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-all-options--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-password-only--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-github--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-google--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-enforced-saml--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png
index 636ef7cab1bf5..b630543a63f45 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--dark.png differ
diff --git a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png
index 60db13f851093..3a2fe7a7a7229 100644
Binary files a/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png and b/frontend/__snapshots__/scenes-other-settings--settings-session-timeout-sso-only--light.png differ
diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
index b859dae72e071..4ba16ded0e86c 100644
--- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
+++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx
@@ -323,6 +323,13 @@ export function SummaryTable(): JSX.Element {
],
},
]
+ if (experiment.filters.insight === InsightType.FUNNELS) {
+ if (experiment.filters?.events?.[0]) {
+ filters.push(experiment.filters.events[0])
+ } else if (experiment.filters?.actions?.[0]) {
+ filters.push(experiment.filters.actions[0])
+ }
+ }
const filterGroup: Partial
[extraction damage: diff header lost; the fragments below are from a session replay settings component (hunks in CanvasCaptureSettings, NetworkCaptureSettings, ReplayGeneral) with JSX markup stripped, so only hunk headers and text nodes survive]
+ With the PosthogObserver Observer,
+ PostHog will try to record all screen changes automatically.
+
+ If you want to manually send a new screen capture event, use the screen function.
 This setting controls if browser console logs will be captured as a part of recordings. The console logs will be shown in the recording player to help you debug any issues.
@@ -52,6 +136,19 @@ function CanvasCaptureSettings(): JSX.Element | null {
     return (
 This setting controls if browser canvas elements will be captured as part of recordings.
@@ -111,6 +208,7 @@ export function NetworkCaptureSettings(): JSX.Element {
     return (
         <>
+
 This setting controls if performance and network information will be captured alongside recordings. The network requests and timings will be shown in the recording player to help you debug any issues.
@@ -140,7 +238,7 @@ function NetworkCaptureSettings(): JSX.Element {
 Learn how to mask header and payload values in our docs
-Capture headers and body are only available for JavaScript Web.
+Use the settings below to restrict the domains where recordings will be captured. If no domains are selected, then there will be no domain restriction.
@@ -399,6 +498,7 @@ export function ReplayGeneral(): JSX.Element {
     return (
 Watch recordings of how users interact with your web app to see what can be improved.{' '}
[extraction damage: diff header lost; the hunks below are from the mypy baseline file]
 posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/queries/column_optimizer/foss_column_optimizer.py:0: error: Argument 1 to "get" of "dict" has incompatible type "tuple[str, str]"; expected "tuple[str, Literal['properties', 'group_properties', 'person_properties']]" [arg-type]
+posthog/queries/column_optimizer/foss_column_optimizer.py:0: error: Argument 2 to "get_materialized_column_for_property" has incompatible type "str"; expected "Literal['properties', 'group_properties', 'person_properties']" [arg-type]
 posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str | int | list[str]", expected "str | int") [misc]
 posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc]
 posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment]
@@ -664,7 +664,6 @@
 posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index]
 posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined]
 posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index]
 posthog/models/test/test_organization_model.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined]
-posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
 posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
 posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
 posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
diff --git a/posthog/api/cohort.py b/posthog/api/cohort.py
index 2d5d557f52b0b..762e9a5b4a894 100644
--- a/posthog/api/cohort.py
+++ b/posthog/api/cohort.py
@@ -14,6 +14,7 @@
 )
 from posthog.models.person.person import PersonDistinctId
 from posthog.models.property.property import Property, PropertyGroup
+from posthog.models.team.team import Team
 from posthog.queries.base import property_group_to_Q
 from posthog.metrics import LABEL_TEAM_ID
 from posthog.renderers import SafeJSONRenderer
@@ -22,8 +23,6 @@
 from django.conf import settings
 from django.db.models import QuerySet, Prefetch, prefetch_related_objects, OuterRef, Subquery
-from django.db.models.expressions import F
-from django.utils import timezone
 from rest_framework import serializers, viewsets, request, status
 from posthog.api.utils import action
 from rest_framework.exceptions import ValidationError
@@ -52,7 +51,7 @@
 from posthog.hogql.constants import CSV_EXPORT_LIMIT
 from posthog.event_usage import report_user_action
 from posthog.hogql.context import HogQLContext
-from posthog.models import Cohort, FeatureFlag, User, Person
+from posthog.models import Cohort, FeatureFlag, Person
 from posthog.models.async_deletion import AsyncDeletion, DeletionType
 from posthog.models.cohort.util import get_dependent_cohorts, print_cohort_hogql_query
 from posthog.models.cohort import CohortOrEmpty
@@ -139,14 +138,14 @@ def _handle_static(self, cohort: Cohort, context: dict, validated_data: dict) ->
         elif context.get("from_feature_flag_key"):
             insert_cohort_from_feature_flag.delay(cohort.pk, context["from_feature_flag_key"], self.context["team_id"])
         elif validated_data.get("query"):
-            insert_cohort_from_query.delay(cohort.pk)
+            insert_cohort_from_query.delay(cohort.pk, self.context["team_id"])
         else:
             filter_data = request.GET.dict()
             existing_cohort_id = context.get("from_cohort_id")
             if existing_cohort_id:
                 filter_data = {**filter_data, "from_cohort_id": existing_cohort_id}
             if filter_data:
-                insert_cohort_from_insight_filter.delay(cohort.pk, filter_data)
+                insert_cohort_from_insight_filter.delay(cohort.pk, filter_data, self.context["team_id"])
     def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort:
         request = self.context["request"]
@@ -173,7 +172,7 @@ def _calculate_static_by_csv(self, file, cohort: Cohort) -> None:
         decoded_file = file.read().decode("utf-8").splitlines()
         reader = csv.reader(decoded_file)
         distinct_ids_and_emails = [row[0] for row in reader if len(row) > 0 and row]
-        calculate_cohort_from_list.delay(cohort.pk, distinct_ids_and_emails)
+        calculate_cohort_from_list.delay(cohort.pk, distinct_ids_and_emails, team_id=self.context["team_id"])
     def validate_query(self, query: Optional[dict]) -> Optional[dict]:
         if not query:
@@ -195,7 +194,7 @@ def validate_filters(self, request_filters: dict):
             instance = cast(Cohort, self.instance)
             cohort_id = instance.pk
             flags: QuerySet[FeatureFlag] = FeatureFlag.objects.filter(
-                team_id=self.context["team_id"], active=True, deleted=False
+                team__project_id=self.context["project_id"], active=True, deleted=False
             )
             cohort_used_in_flags = len([flag for flag in flags if cohort_id in flag.get_cohort_ids()]) > 0
@@ -208,7 +207,7 @@ def validate_filters(self, request_filters: dict):
                     )
                 if prop.type == "cohort":
-                    nested_cohort = Cohort.objects.get(pk=prop.value, team_id=self.context["team_id"])
+                    nested_cohort = Cohort.objects.get(pk=prop.value, team__project_id=self.context["project_id"])
                     dependent_cohorts = get_dependent_cohorts(nested_cohort)
                     for dependent_cohort in [nested_cohort, *dependent_cohorts]:
                         if (
@@ -229,7 +228,6 @@ def validate_filters(self, request_filters: dict):
     def update(self, cohort: Cohort, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort:  # type: ignore
         request = self.context["request"]
-        user = cast(User, request.user)
         cohort.name = validated_data.get("name", cohort.name)
         cohort.description = validated_data.get("description", cohort.description)
@@ -240,22 +238,29 @@ def update(self, cohort: Cohort, validated_data: dict, *args: Any, **kwargs: Any
         is_deletion_change = deleted_state is not None and cohort.deleted != deleted_state
         if is_deletion_change:
+            relevant_team_ids = Team.objects.filter(project_id=cohort.team.project_id).values_list("id", flat=True)
             cohort.deleted = deleted_state
             if deleted_state:
                 # De-attach from experiments
                 cohort.experiment_set.set([])
-                AsyncDeletion.objects.get_or_create(
-                    deletion_type=DeletionType.Cohort_full,
-                    team_id=cohort.team.pk,
-                    key=f"{cohort.pk}_{cohort.version}",
-                    created_by=user,
+                AsyncDeletion.objects.bulk_create(
+                    [
+                        AsyncDeletion(
+                            deletion_type=DeletionType.Cohort_full,
+                            team_id=team_id,
+                            # Only appending `team_id` if it's not the same as the cohort's `team_id``, so that
+                            # the migration to environments does not accidentally cause duplicate `AsyncDeletion`s
+                            key=f"{cohort.pk}_{cohort.version}{('_'+team_id) if team_id != cohort.team_id else ''}",
+                        )
+                        for team_id in relevant_team_ids
+                    ],
+                    ignore_conflicts=True,
                 )
             else:
                 AsyncDeletion.objects.filter(
                     deletion_type=DeletionType.Cohort_full,
-                    team_id=cohort.team.pk,
-                    key=f"{cohort.pk}_{cohort.version}",
+                    key__startswith=f"{cohort.pk}_{cohort.version}",  # We target this _prefix_, so all teams are covered
                 ).delete()
         elif not cohort.is_static:
             cohort.is_calculating = True
@@ -475,12 +480,12 @@ def perform_update(self, serializer):
 class LegacyCohortViewSet(CohortViewSet):
-    param_derived_from_user_current_team = "project_id"
+    param_derived_from_user_current_team = "team_id"
 def will_create_loops(cohort: Cohort) -> bool:
     # Loops can only be formed when trying to update a Cohort, not when creating one
-    team_id = cohort.team_id
+    project_id = cohort.team.project_id
     # We can model this as a directed graph, where each node is a Cohort and each edge is a reference to another Cohort
     # There's a loop only if there's a cycle in the directed graph. The "directed" bit is important.
@@ -501,7 +506,7 @@ def dfs_loop_helper(current_cohort: Cohort, seen_cohorts, cohorts_on_path):
                 return True
             elif property.value not in seen_cohorts:
                 try:
-                    nested_cohort = Cohort.objects.get(pk=property.value, team_id=team_id)
+                    nested_cohort = Cohort.objects.get(pk=property.value, team__project_id=project_id)
                 except Cohort.DoesNotExist:
                     raise ValidationError("Invalid Cohort ID in filter")
@@ -514,23 +519,21 @@ def dfs_loop_helper(current_cohort: Cohort, seen_cohorts, cohorts_on_path):
     return dfs_loop_helper(cohort, set(), set())
-def insert_cohort_people_into_pg(cohort: Cohort):
+def insert_cohort_people_into_pg(cohort: Cohort, *, team_id: int):
     ids = sync_execute(
-        "SELECT person_id FROM {} where team_id = %(team_id)s AND cohort_id = %(cohort_id)s".format(
-            PERSON_STATIC_COHORT_TABLE
-        ),
-        {"cohort_id": cohort.pk, "team_id": cohort.team.pk},
+        f"SELECT person_id FROM {PERSON_STATIC_COHORT_TABLE} where team_id = %(team_id)s AND cohort_id = %(cohort_id)s",
+        {"cohort_id": cohort.pk, "team_id": team_id},
     )
-    cohort.insert_users_list_by_uuid(items=[str(id[0]) for id in ids])
+    cohort.insert_users_list_by_uuid(items=[str(id[0]) for id in ids], team_id=team_id)
-def insert_cohort_query_actors_into_ch(cohort: Cohort):
-    context = HogQLContext(enable_select_queries=True, team_id=cohort.team.pk)
-    query = print_cohort_hogql_query(cohort, context)
-    insert_actors_into_cohort_by_query(cohort, query, {}, context)
+def insert_cohort_query_actors_into_ch(cohort: Cohort, *, team: Team):
+    context = HogQLContext(enable_select_queries=True, team_id=team.id)
+    query = print_cohort_hogql_query(cohort, context, team=team)
+    insert_actors_into_cohort_by_query(cohort, query, {}, context, team_id=team.id)
-def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
+def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict, *, team_id: int):
     from_existing_cohort_id = filter_data.get("from_cohort_id")
     context: HogQLContext
@@ -543,7 +546,7 @@ def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
             ORDER BY person_id
         """
         params = {
-            "team_id": cohort.team.pk,
+            "team_id": team_id,
            "from_cohort_id": existing_cohort.pk,
            "version": existing_cohort.version,
         }
@@ -590,48 +593,36 @@ def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
     else:
         query, params = query_builder.actor_query(limit_actors=False)
-    insert_actors_into_cohort_by_query(cohort, query, params, context)
-
-
-def insert_actors_into_cohort_by_query(cohort: Cohort, query: str, params: dict[str, Any], context: HogQLContext):
-    try:
-        sync_execute(
-            INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID.format(cohort_table=PERSON_STATIC_COHORT_TABLE, query=query),
-            {
-                "cohort_id": cohort.pk,
-                "_timestamp": datetime.now(),
-                "team_id": cohort.team.pk,
-                **context.values,
-                **params,
-            },
-        )
-
-        cohort.is_calculating = False
-        cohort.last_calculation = timezone.now()
-        cohort.errors_calculating = 0
-        cohort.last_error_at = None
-        cohort.save(update_fields=["errors_calculating", "last_calculation", "is_calculating", "last_error_at"])
-    except Exception as err:
-        if settings.DEBUG:
-            raise
-        cohort.is_calculating = False
-        cohort.errors_calculating = F("errors_calculating") + 1
-        cohort.last_error_at = timezone.now()
-        cohort.save(update_fields=["errors_calculating", "is_calculating", "last_error_at"])
-        capture_exception(err)
+    insert_actors_into_cohort_by_query(cohort, query, params, context, team_id=team_id)
+
+
+def insert_actors_into_cohort_by_query(
+    cohort: Cohort, query: str, params: dict[str, Any], context: HogQLContext, *, team_id: int
+):
+    sync_execute(
+        INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID.format(cohort_table=PERSON_STATIC_COHORT_TABLE, query=query),
+        {
+            "cohort_id": cohort.pk,
+            "_timestamp": datetime.now(),
+            "team_id": team_id,
+            **context.values,
+            **params,
+        },
+    )
 def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int, batchsize: int = 1_000):
     # :TODO: Find a way to incorporate this into the same code path as feature flag evaluation
+    team: Team = Team.objects.get(pk=team_id)
     try:
-        feature_flag = FeatureFlag.objects.get(team_id=team_id, key=flag)
+        feature_flag = FeatureFlag.objects.get(team__project_id=team.project_id, key=flag)
     except FeatureFlag.DoesNotExist:
         return []
     if not feature_flag.active or feature_flag.deleted or feature_flag.aggregation_group_type_index is not None:
         return []
-    cohort = Cohort.objects.get(pk=cohort_id, team_id=team_id)
+    cohort = Cohort.objects.get(pk=cohort_id, team__project_id=team.project_id)
     matcher_cache = FlagsMatcherCache(team_id)
     uuids_to_add_to_cohort = []
     cohorts_cache: dict[int, CohortOrEmpty] = {}
@@ -640,7 +631,9 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
             # TODO: Consider disabling flags with cohorts for creating static cohorts
             # because this is currently a lot more inefficient for flag matching,
             # as we're required to go to the database for each person.
-            cohorts_cache = {cohort.pk: cohort for cohort in Cohort.objects.filter(team_id=team_id, deleted=False)}
+            cohorts_cache = {
+                cohort.pk: cohort for cohort in Cohort.objects.filter(team__project_id=team.project_id, deleted=False)
+            }
         default_person_properties = {}
         for condition in feature_flag.conditions:
@@ -727,7 +720,7 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
                 if len(uuids_to_add_to_cohort) >= batchsize:
                     cohort.insert_users_list_by_uuid(
-                        uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize
+                        uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize, team_id=team_id
                     )
                     uuids_to_add_to_cohort = []
@@ -735,7 +728,9 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
             batch_of_persons = queryset[start : start + batchsize]
         if len(uuids_to_add_to_cohort) > 0:
-            cohort.insert_users_list_by_uuid(uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize)
+            cohort.insert_users_list_by_uuid(
+                uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize, team_id=team_id
+            )
     except Exception as err:
         if settings.DEBUG or settings.TEST:
diff --git a/posthog/api/organization.py b/posthog/api/organization.py
index c522ca164c0b9..6fe798479dd7b 100644
--- a/posthog/api/organization.py
+++ b/posthog/api/organization.py
@@ -5,6 +5,8 @@
 from django.shortcuts import get_object_or_404
 from rest_framework import exceptions, permissions, serializers, viewsets
 from rest_framework.request import Request
+from rest_framework.response import Response
+import posthoganalytics
 from posthog import settings
 from posthog.api.routing import TeamAndOrgViewSetMixin
@@ -12,7 +14,7 @@
 from posthog.auth import PersonalAPIKeyAuthentication
 from posthog.cloud_utils import is_cloud
 from posthog.constants import INTERNAL_BOT_EMAIL_SUFFIX, AvailableFeature
-from posthog.event_usage import report_organization_deleted
+from posthog.event_usage import report_organization_deleted, groups
 from posthog.models import Organization, User
 from posthog.models.async_deletion import AsyncDeletion, DeletionType
 from posthog.rbac.user_access_control import UserAccessControlSerializerMixin
@@ -240,3 +242,24 @@ def get_serializer_context(self) -> dict[str, Any]:
             **super().get_serializer_context(),
             "user_permissions": UserPermissions(cast(User, self.request.user)),
         }
+
+    def update(self, request: Request, *args: Any, **kwargs: Any) -> Response:
+        if "enforce_2fa" in request.data:
+            enforce_2fa_value = request.data["enforce_2fa"]
+            organization = self.get_object()
+            user = cast(User, request.user)
+
+            # Add capture event for 2FA enforcement change
+            posthoganalytics.capture(
+                str(user.distinct_id),
+                "organization 2fa enforcement toggled",
+                properties={
+                    "enabled": enforce_2fa_value,
+                    "organization_id": str(organization.id),
+                    "organization_name": organization.name,
+                    "user_role": user.organization_memberships.get(organization=organization).level,
+                },
+                groups=groups(organization),
+            )
+
+        return super().update(request, *args, **kwargs)
diff --git a/posthog/api/test/__snapshots__/test_cohort.ambr b/posthog/api/test/__snapshots__/test_cohort.ambr
index f1fe8c5d00333..2a4e7cdcc4d86 100644
--- a/posthog/api/test/__snapshots__/test_cohort.ambr
+++ b/posthog/api/test/__snapshots__/test_cohort.ambr
@@ -98,11 +98,14 @@
 # name: TestCohort.test_async_deletion_of_cohort.3
   '''
   /* user_id:0 celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 1
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestCohort.test_async_deletion_of_cohort.4
@@ -163,11 +166,14 @@
 # name: TestCohort.test_async_deletion_of_cohort.7
   '''
   /* user_id:0 celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 2
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestCohort.test_async_deletion_of_cohort.8
diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr
index b51af7a796f7d..8250f4f667393 100644
--- a/posthog/api/test/__snapshots__/test_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr
@@ -336,6 +336,69 @@
   '''
 # ---
 # name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator
+  '''
+  SELECT "posthog_team"."id",
+         "posthog_team"."uuid",
+         "posthog_team"."organization_id",
+         "posthog_team"."project_id",
+         "posthog_team"."api_token",
+         "posthog_team"."app_urls",
+         "posthog_team"."name",
+         "posthog_team"."slack_incoming_webhook",
+         "posthog_team"."created_at",
+         "posthog_team"."updated_at",
+         "posthog_team"."anonymize_ips",
+         "posthog_team"."completed_snippet_onboarding",
+         "posthog_team"."has_completed_onboarding_for",
+         "posthog_team"."ingested_event",
+         "posthog_team"."autocapture_opt_out",
+         "posthog_team"."autocapture_web_vitals_opt_in",
+         "posthog_team"."autocapture_web_vitals_allowed_metrics",
+         "posthog_team"."autocapture_exceptions_opt_in",
+         "posthog_team"."autocapture_exceptions_errors_to_ignore",
+         "posthog_team"."person_processing_opt_out",
+         "posthog_team"."session_recording_opt_in",
+         "posthog_team"."session_recording_sample_rate",
+         "posthog_team"."session_recording_minimum_duration_milliseconds",
+         "posthog_team"."session_recording_linked_flag",
+         "posthog_team"."session_recording_network_payload_capture_config",
+         "posthog_team"."session_recording_url_trigger_config",
+         "posthog_team"."session_recording_url_blocklist_config",
+         "posthog_team"."session_recording_event_trigger_config",
+         "posthog_team"."session_replay_config",
+         "posthog_team"."survey_config",
+         "posthog_team"."capture_console_log_opt_in",
+         "posthog_team"."capture_performance_opt_in",
+         "posthog_team"."capture_dead_clicks",
+         "posthog_team"."surveys_opt_in",
+         "posthog_team"."heatmaps_opt_in",
+         "posthog_team"."session_recording_version",
+         "posthog_team"."signup_token",
+         "posthog_team"."is_demo",
+         "posthog_team"."access_control",
+         "posthog_team"."week_start_day",
+         "posthog_team"."inject_web_apps",
+         "posthog_team"."test_account_filters",
+         "posthog_team"."test_account_filters_default_checked",
+         "posthog_team"."path_cleaning_filters",
+         "posthog_team"."timezone",
+         "posthog_team"."data_attributes",
+         "posthog_team"."person_display_name_properties",
+         "posthog_team"."live_events_columns",
+         "posthog_team"."recording_domains",
+         "posthog_team"."primary_dashboard_id",
+         "posthog_team"."extra_settings",
+         "posthog_team"."modifiers",
+         "posthog_team"."correlation_config",
+         "posthog_team"."session_recording_retention_period_days",
+         "posthog_team"."external_data_workspace_id",
+         "posthog_team"."external_data_workspace_last_synced_at"
+  FROM "posthog_team"
+  WHERE "posthog_team"."id" = 99999
+  LIMIT 21
+  '''
+# ---
+#
name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.1 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -353,12 +416,13 @@ "posthog_featureflag"."usage_dashboard_id", "posthog_featureflag"."has_enriched_analytics" FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE ("posthog_featureflag"."key" = 'some-feature2' - AND "posthog_featureflag"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.1 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.2 ''' SELECT "posthog_cohort"."id", "posthog_cohort"."name", @@ -379,12 +443,13 @@ "posthog_cohort"."is_static", "posthog_cohort"."groups" FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") WHERE ("posthog_cohort"."id" = 99999 - AND "posthog_cohort"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.2 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.3 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -405,7 +470,7 @@ LIMIT 2 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.3 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.4 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -425,7 +490,7 @@ 5 /* ... */)) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.4 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.5 ''' SELECT "posthog_person"."uuid" FROM "posthog_person" @@ -440,76 +505,6 @@ LIMIT 1))) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.5 - ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."person_processing_opt_out", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_recording_url_trigger_config", - "posthog_team"."session_recording_url_blocklist_config", - "posthog_team"."session_recording_event_trigger_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."capture_dead_clicks", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - 
"posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 99999 - LIMIT 21 - ''' -# --- # name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.6 ''' SELECT "posthog_person"."id", @@ -589,72 +584,6 @@ ''' # --- # name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too - ''' - SELECT "posthog_featureflag"."id", - "posthog_featureflag"."key", - "posthog_featureflag"."name", - "posthog_featureflag"."filters", - "posthog_featureflag"."rollout_percentage", - "posthog_featureflag"."team_id", - "posthog_featureflag"."created_by_id", - "posthog_featureflag"."created_at", - "posthog_featureflag"."deleted", - "posthog_featureflag"."active", - "posthog_featureflag"."rollback_conditions", - "posthog_featureflag"."performed_rollback", - "posthog_featureflag"."ensure_experience_continuity", - "posthog_featureflag"."usage_dashboard_id", - "posthog_featureflag"."has_enriched_analytics" - FROM "posthog_featureflag" - WHERE ("posthog_featureflag"."key" = 'some-feature-new' - AND "posthog_featureflag"."team_id" = 99999) - LIMIT 21 - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.1 - ''' - SELECT "posthog_cohort"."id", - "posthog_cohort"."name", - "posthog_cohort"."description", - "posthog_cohort"."team_id", - "posthog_cohort"."deleted", - "posthog_cohort"."filters", - "posthog_cohort"."query", - "posthog_cohort"."version", - "posthog_cohort"."pending_version", - "posthog_cohort"."count", - "posthog_cohort"."created_by_id", - "posthog_cohort"."created_at", - "posthog_cohort"."is_calculating", - "posthog_cohort"."last_calculation", - "posthog_cohort"."errors_calculating", - "posthog_cohort"."last_error_at", - "posthog_cohort"."is_static", - "posthog_cohort"."groups" - FROM "posthog_cohort" - WHERE ("posthog_cohort"."id" = 99999 - AND "posthog_cohort"."team_id" = 99999) - LIMIT 21 - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.10 - ''' - SELECT "posthog_person"."uuid" - FROM "posthog_person" - WHERE ("posthog_person"."team_id" = 99999 - AND "posthog_person"."uuid" IN ('00000000000040008000000000000000'::uuid, - '00000000000040008000000000000001'::uuid, - '00000000000040008000000000000002'::uuid, - '00000000000040008000000000000003'::uuid) - AND NOT 
(EXISTS - (SELECT 1 AS "a" - FROM "posthog_cohortpeople" U1 - WHERE (U1."cohort_id" = 99999 - AND U1."person_id" = ("posthog_person"."id")) - LIMIT 1))) - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.11 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -710,13 +639,6 @@ "posthog_team"."modifiers", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" @@ -724,32 +646,31 @@ LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.2 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.1 ''' - SELECT "posthog_cohort"."id", - "posthog_cohort"."name", - "posthog_cohort"."description", - "posthog_cohort"."team_id", - "posthog_cohort"."deleted", - "posthog_cohort"."filters", - "posthog_cohort"."query", - "posthog_cohort"."version", - "posthog_cohort"."pending_version", - "posthog_cohort"."count", - "posthog_cohort"."created_by_id", - "posthog_cohort"."created_at", - "posthog_cohort"."is_calculating", - "posthog_cohort"."last_calculation", - "posthog_cohort"."errors_calculating", - "posthog_cohort"."last_error_at", - "posthog_cohort"."is_static", - "posthog_cohort"."groups" - FROM "posthog_cohort" - WHERE (NOT "posthog_cohort"."deleted" - AND "posthog_cohort"."team_id" = 99999) + SELECT "posthog_featureflag"."id", + "posthog_featureflag"."key", + "posthog_featureflag"."name", + "posthog_featureflag"."filters", + "posthog_featureflag"."rollout_percentage", + "posthog_featureflag"."team_id", + "posthog_featureflag"."created_by_id", + "posthog_featureflag"."created_at", + "posthog_featureflag"."deleted", + "posthog_featureflag"."active", + "posthog_featureflag"."rollback_conditions", + "posthog_featureflag"."performed_rollback", + "posthog_featureflag"."ensure_experience_continuity", + "posthog_featureflag"."usage_dashboard_id", + "posthog_featureflag"."has_enriched_analytics" + FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") + WHERE ("posthog_featureflag"."key" = 'some-feature-new' + AND "posthog_team"."project_id" = 99999) + LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.3 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.10 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -786,9 +707,119 @@ AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb)))) ORDER BY "posthog_person"."id" ASC LIMIT 1000 + OFFSET 1000 + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.11 + ''' + SELECT "posthog_person"."uuid" + FROM "posthog_person" + WHERE ("posthog_person"."team_id" = 99999 + AND "posthog_person"."uuid" IN ('00000000000040008000000000000000'::uuid, + '00000000000040008000000000000001'::uuid, 
+ '00000000000040008000000000000002'::uuid, + '00000000000040008000000000000003'::uuid) + AND NOT (EXISTS + (SELECT 1 AS "a" + FROM "posthog_cohortpeople" U1 + WHERE (U1."cohort_id" = 99999 + AND U1."person_id" = ("posthog_person"."id")) + LIMIT 1))) + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.2 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") + WHERE ("posthog_cohort"."id" = 99999 + AND "posthog_team"."project_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.3 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") + WHERE (NOT "posthog_cohort"."deleted" + AND "posthog_team"."project_id" = 99999) ''' # --- # name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.4 + ''' + SELECT "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_person" + WHERE ("posthog_person"."team_id" = 99999 + AND ((("posthog_person"."properties" -> 'group') = '"none"'::jsonb + AND "posthog_person"."properties" ? 'group' + AND NOT (("posthog_person"."properties" -> 'group') = 'null'::jsonb)) + OR (("posthog_person"."properties" -> 'group2') IN ('1'::jsonb, + '2'::jsonb, + '3'::jsonb) + AND "posthog_person"."properties" ? 'group2' + AND NOT (("posthog_person"."properties" -> 'group2') = 'null'::jsonb)) + OR EXISTS + (SELECT 1 AS "a" + FROM "posthog_cohortpeople" U0 + WHERE (U0."cohort_id" = 99999 + AND U0."cohort_id" = 99999 + AND U0."person_id" = ("posthog_person"."id")) + LIMIT 1) + OR (("posthog_person"."properties" -> 'does-not-exist') = '"none"'::jsonb + AND "posthog_person"."properties" ? 
'does-not-exist' + AND NOT (("posthog_person"."properties" -> 'does-not-exist') = 'null'::jsonb)) + OR (("posthog_person"."properties" -> 'key') = '"value"'::jsonb + AND "posthog_person"."properties" ? 'key' + AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb)))) + ORDER BY "posthog_person"."id" ASC + LIMIT 1000 + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.5 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -808,7 +839,7 @@ 5 /* ... */)) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.5 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.6 ''' SELECT ("posthog_person"."id" IS NULL OR "posthog_person"."id" IS NULL @@ -827,7 +858,7 @@ AND "posthog_person"."team_id" = 99999) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.6 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.7 ''' SELECT ("posthog_person"."id" IS NOT NULL OR "posthog_person"."id" IS NULL @@ -846,7 +877,7 @@ AND "posthog_person"."team_id" = 99999) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.7 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.8 ''' SELECT ("posthog_person"."id" IS NULL OR "posthog_person"."id" IS NOT NULL @@ -865,7 +896,7 @@ AND "posthog_person"."team_id" = 99999) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.8 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.9 ''' SELECT ("posthog_person"."id" IS NULL OR "posthog_person"."id" IS NULL @@ -884,47 +915,70 @@ AND "posthog_person"."team_id" = 99999) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.9 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment ''' - SELECT "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_person" - WHERE ("posthog_person"."team_id" = 99999 - AND ((("posthog_person"."properties" -> 'group') = '"none"'::jsonb - AND "posthog_person"."properties" ? 'group' - AND NOT (("posthog_person"."properties" -> 'group') = 'null'::jsonb)) - OR (("posthog_person"."properties" -> 'group2') IN ('1'::jsonb, - '2'::jsonb, - '3'::jsonb) - AND "posthog_person"."properties" ? 'group2' - AND NOT (("posthog_person"."properties" -> 'group2') = 'null'::jsonb)) - OR EXISTS - (SELECT 1 AS "a" - FROM "posthog_cohortpeople" U0 - WHERE (U0."cohort_id" = 99999 - AND U0."cohort_id" = 99999 - AND U0."person_id" = ("posthog_person"."id")) - LIMIT 1) - OR (("posthog_person"."properties" -> 'does-not-exist') = '"none"'::jsonb - AND "posthog_person"."properties" ? 
'does-not-exist' - AND NOT (("posthog_person"."properties" -> 'does-not-exist') = 'null'::jsonb)) - OR (("posthog_person"."properties" -> 'key') = '"value"'::jsonb - AND "posthog_person"."properties" ? 'key' - AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb)))) - ORDER BY "posthog_person"."id" ASC - LIMIT 1000 - OFFSET 1000 + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.1 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -942,38 +996,32 @@ "posthog_featureflag"."usage_dashboard_id", "posthog_featureflag"."has_enriched_analytics" FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE ("posthog_featureflag"."key" = 'some-feature2' - AND "posthog_featureflag"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: 
TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.1 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.10 ''' - SELECT "posthog_cohort"."id", - "posthog_cohort"."name", - "posthog_cohort"."description", - "posthog_cohort"."team_id", - "posthog_cohort"."deleted", - "posthog_cohort"."filters", - "posthog_cohort"."query", - "posthog_cohort"."version", - "posthog_cohort"."pending_version", - "posthog_cohort"."count", - "posthog_cohort"."created_by_id", - "posthog_cohort"."created_at", - "posthog_cohort"."is_calculating", - "posthog_cohort"."last_calculation", - "posthog_cohort"."errors_calculating", - "posthog_cohort"."last_error_at", - "posthog_cohort"."is_static", - "posthog_cohort"."groups" - FROM "posthog_cohort" - WHERE ("posthog_cohort"."id" = 99999 - AND "posthog_cohort"."team_id" = 99999) - LIMIT 21 + SELECT "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_person" + WHERE ("posthog_person"."team_id" = 99999 + AND ("posthog_person"."properties" -> 'key') IS NOT NULL) + ORDER BY "posthog_person"."id" ASC + LIMIT 1000 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.10 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.11 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -993,7 +1041,7 @@ 5 /* ... 
*/)) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.11 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.12 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1013,7 +1061,7 @@ OFFSET 1000 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.12 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.13 ''' SELECT "posthog_person"."uuid" FROM "posthog_person" @@ -1028,77 +1076,34 @@ LIMIT 1))) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.13 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.2 ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."person_processing_opt_out", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_recording_url_trigger_config", - "posthog_team"."session_recording_url_blocklist_config", - "posthog_team"."session_recording_event_trigger_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."capture_dead_clicks", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - 
"posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 99999 + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") + WHERE ("posthog_cohort"."id" = 99999 + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.2 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.3 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1119,7 +1124,7 @@ LIMIT 1000 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.3 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.4 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1139,7 +1144,7 @@ 5 /* ... */)) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.4 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.5 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1161,7 +1166,7 @@ OFFSET 1000 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.5 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.6 ''' SELECT "posthog_person"."uuid" FROM "posthog_person" @@ -1175,7 +1180,7 @@ LIMIT 1))) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.6 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.7 ''' SELECT "posthog_team"."id", "posthog_team"."uuid", @@ -1231,13 +1236,6 @@ "posthog_team"."modifiers", "posthog_team"."correlation_config", "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", "posthog_team"."external_data_workspace_id", "posthog_team"."external_data_workspace_last_synced_at" FROM "posthog_team" @@ -1245,7 +1243,7 @@ LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.7 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.8 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -1263,12 +1261,13 @@ "posthog_featureflag"."usage_dashboard_id", 
"posthog_featureflag"."has_enriched_analytics" FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE ("posthog_featureflag"."key" = 'some-feature-new' - AND "posthog_featureflag"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.8 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.9 ''' SELECT "posthog_cohort"."id", "posthog_cohort"."name", @@ -1289,31 +1288,76 @@ "posthog_cohort"."is_static", "posthog_cohort"."groups" FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") WHERE ("posthog_cohort"."id" = 99999 - AND "posthog_cohort"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.9 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag ''' - SELECT "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_person" - WHERE ("posthog_person"."team_id" = 99999 - AND ("posthog_person"."properties" -> 'key') IS NOT NULL) - ORDER BY "posthog_person"."id" ASC - LIMIT 1000 + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + "posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + 
"posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.1 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -1331,12 +1375,13 @@ "posthog_featureflag"."usage_dashboard_id", "posthog_featureflag"."has_enriched_analytics" FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE ("posthog_featureflag"."key" = 'some-feature2' - AND "posthog_featureflag"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.1 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.2 ''' SELECT "posthog_cohort"."id", "posthog_cohort"."name", @@ -1357,12 +1402,13 @@ "posthog_cohort"."is_static", "posthog_cohort"."groups" FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") WHERE ("posthog_cohort"."id" = 99999 - AND "posthog_cohort"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.2 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.3 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1383,7 +1429,7 @@ LIMIT 1000 ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.3 +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.4 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1403,20 +1449,6 @@ 5 /* ... */)) ''' # --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.4 - ''' - SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", - "posthog_featureflaghashkeyoverride"."hash_key", - "posthog_featureflaghashkeyoverride"."person_id" - FROM "posthog_featureflaghashkeyoverride" - WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1, - 2, - 3, - 4, - 5 /* ... */) - AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) - ''' -# --- # name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.5 ''' SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", @@ -1427,128 +1459,72 @@ 2, 3, 4, - 5 /* ... 
*/) - AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.6 - ''' - SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", - "posthog_featureflaghashkeyoverride"."hash_key", - "posthog_featureflaghashkeyoverride"."person_id" - FROM "posthog_featureflaghashkeyoverride" - WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1, - 2, - 3, - 4, - 5 /* ... */) - AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.7 - ''' - SELECT "posthog_person"."id", - "posthog_person"."created_at", - "posthog_person"."properties_last_updated_at", - "posthog_person"."properties_last_operation", - "posthog_person"."team_id", - "posthog_person"."properties", - "posthog_person"."is_user_id", - "posthog_person"."is_identified", - "posthog_person"."uuid", - "posthog_person"."version" - FROM "posthog_person" - WHERE ("posthog_person"."team_id" = 99999 - AND ("posthog_person"."properties" -> 'key') = '"value"'::jsonb - AND "posthog_person"."properties" ? 'key' - AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb)) - ORDER BY "posthog_person"."id" ASC - LIMIT 1000 - OFFSET 1000 - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.8 - ''' - SELECT "posthog_person"."uuid" - FROM "posthog_person" - WHERE ("posthog_person"."team_id" = 99999 - AND "posthog_person"."uuid" IN ('00000000000040008000000000000002'::uuid) - AND NOT (EXISTS - (SELECT 1 AS "a" - FROM "posthog_cohortpeople" U1 - WHERE (U1."cohort_id" = 99999 - AND U1."person_id" = ("posthog_person"."id")) - LIMIT 1))) - ''' -# --- -# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.9 - ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."person_processing_opt_out", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_recording_url_trigger_config", - "posthog_team"."session_recording_url_blocklist_config", - "posthog_team"."session_recording_event_trigger_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."capture_dead_clicks", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - 
"posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 99999 - LIMIT 21 + 5 /* ... */) + AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.6 + ''' + SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", + "posthog_featureflaghashkeyoverride"."hash_key", + "posthog_featureflaghashkeyoverride"."person_id" + FROM "posthog_featureflaghashkeyoverride" + WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) + AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.7 + ''' + SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key", + "posthog_featureflaghashkeyoverride"."hash_key", + "posthog_featureflaghashkeyoverride"."person_id" + FROM "posthog_featureflaghashkeyoverride" + WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) + AND "posthog_featureflaghashkeyoverride"."team_id" = 99999) + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.8 + ''' + SELECT "posthog_person"."id", + "posthog_person"."created_at", + "posthog_person"."properties_last_updated_at", + "posthog_person"."properties_last_operation", + "posthog_person"."team_id", + "posthog_person"."properties", + "posthog_person"."is_user_id", + "posthog_person"."is_identified", + "posthog_person"."uuid", + "posthog_person"."version" + FROM "posthog_person" + WHERE ("posthog_person"."team_id" = 99999 + AND ("posthog_person"."properties" -> 'key') = '"value"'::jsonb + AND "posthog_person"."properties" ? 
'key' + AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb)) + ORDER BY "posthog_person"."id" ASC + LIMIT 1000 + OFFSET 1000 + ''' +# --- +# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.9 + ''' + SELECT "posthog_person"."uuid" + FROM "posthog_person" + WHERE ("posthog_person"."team_id" = 99999 + AND "posthog_person"."uuid" IN ('00000000000040008000000000000002'::uuid) + AND NOT (EXISTS + (SELECT 1 AS "a" + FROM "posthog_cohortpeople" U1 + WHERE (U1."cohort_id" = 99999 + AND U1."person_id" = ("posthog_person"."id")) + LIMIT 1))) ''' # --- # name: TestFeatureFlag.test_cant_create_flag_with_data_that_fails_to_query @@ -1668,6 +1644,33 @@ ''' # --- # name: TestFeatureFlag.test_creating_static_cohort.10 + ''' + SELECT "posthog_cohort"."id", + "posthog_cohort"."name", + "posthog_cohort"."description", + "posthog_cohort"."team_id", + "posthog_cohort"."deleted", + "posthog_cohort"."filters", + "posthog_cohort"."query", + "posthog_cohort"."version", + "posthog_cohort"."pending_version", + "posthog_cohort"."count", + "posthog_cohort"."created_by_id", + "posthog_cohort"."created_at", + "posthog_cohort"."is_calculating", + "posthog_cohort"."last_calculation", + "posthog_cohort"."errors_calculating", + "posthog_cohort"."last_error_at", + "posthog_cohort"."is_static", + "posthog_cohort"."groups" + FROM "posthog_cohort" + INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id") + WHERE ("posthog_cohort"."id" = 99999 + AND "posthog_team"."project_id" = 99999) + LIMIT 21 + ''' +# --- +# name: TestFeatureFlag.test_creating_static_cohort.11 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1688,7 +1691,7 @@ LIMIT 10000 ''' # --- -# name: TestFeatureFlag.test_creating_static_cohort.11 +# name: TestFeatureFlag.test_creating_static_cohort.12 ''' SELECT "posthog_persondistinctid"."id", "posthog_persondistinctid"."team_id", @@ -1708,7 +1711,7 @@ 5 /* ... 
*/)) ''' # --- -# name: TestFeatureFlag.test_creating_static_cohort.12 +# name: TestFeatureFlag.test_creating_static_cohort.13 ''' SELECT "posthog_person"."id", "posthog_person"."created_at", @@ -1730,7 +1733,7 @@ OFFSET 10000 ''' # --- -# name: TestFeatureFlag.test_creating_static_cohort.13 +# name: TestFeatureFlag.test_creating_static_cohort.14 ''' SELECT "posthog_person"."uuid" FROM "posthog_person" @@ -1744,76 +1747,6 @@ LIMIT 1))) ''' # --- -# name: TestFeatureFlag.test_creating_static_cohort.14 - ''' - SELECT "posthog_team"."id", - "posthog_team"."uuid", - "posthog_team"."organization_id", - "posthog_team"."project_id", - "posthog_team"."api_token", - "posthog_team"."app_urls", - "posthog_team"."name", - "posthog_team"."slack_incoming_webhook", - "posthog_team"."created_at", - "posthog_team"."updated_at", - "posthog_team"."anonymize_ips", - "posthog_team"."completed_snippet_onboarding", - "posthog_team"."has_completed_onboarding_for", - "posthog_team"."ingested_event", - "posthog_team"."autocapture_opt_out", - "posthog_team"."autocapture_web_vitals_opt_in", - "posthog_team"."autocapture_web_vitals_allowed_metrics", - "posthog_team"."autocapture_exceptions_opt_in", - "posthog_team"."autocapture_exceptions_errors_to_ignore", - "posthog_team"."person_processing_opt_out", - "posthog_team"."session_recording_opt_in", - "posthog_team"."session_recording_sample_rate", - "posthog_team"."session_recording_minimum_duration_milliseconds", - "posthog_team"."session_recording_linked_flag", - "posthog_team"."session_recording_network_payload_capture_config", - "posthog_team"."session_recording_url_trigger_config", - "posthog_team"."session_recording_url_blocklist_config", - "posthog_team"."session_recording_event_trigger_config", - "posthog_team"."session_replay_config", - "posthog_team"."survey_config", - "posthog_team"."capture_console_log_opt_in", - "posthog_team"."capture_performance_opt_in", - "posthog_team"."capture_dead_clicks", - "posthog_team"."surveys_opt_in", - "posthog_team"."heatmaps_opt_in", - "posthog_team"."session_recording_version", - "posthog_team"."signup_token", - "posthog_team"."is_demo", - "posthog_team"."access_control", - "posthog_team"."week_start_day", - "posthog_team"."inject_web_apps", - "posthog_team"."test_account_filters", - "posthog_team"."test_account_filters_default_checked", - "posthog_team"."path_cleaning_filters", - "posthog_team"."timezone", - "posthog_team"."data_attributes", - "posthog_team"."person_display_name_properties", - "posthog_team"."live_events_columns", - "posthog_team"."recording_domains", - "posthog_team"."primary_dashboard_id", - "posthog_team"."extra_settings", - "posthog_team"."modifiers", - "posthog_team"."correlation_config", - "posthog_team"."session_recording_retention_period_days", - "posthog_team"."plugins_opt_in", - "posthog_team"."opt_out_capture", - "posthog_team"."event_names", - "posthog_team"."event_names_with_usage", - "posthog_team"."event_properties", - "posthog_team"."event_properties_with_usage", - "posthog_team"."event_properties_numerical", - "posthog_team"."external_data_workspace_id", - "posthog_team"."external_data_workspace_last_synced_at" - FROM "posthog_team" - WHERE "posthog_team"."id" = 99999 - LIMIT 21 - ''' -# --- # name: TestFeatureFlag.test_creating_static_cohort.15 ''' SELECT "posthog_team"."id", @@ -2141,6 +2074,69 @@ ''' # --- # name: TestFeatureFlag.test_creating_static_cohort.8 + ''' + SELECT "posthog_team"."id", + "posthog_team"."uuid", + "posthog_team"."organization_id", + "posthog_team"."project_id", + 
"posthog_team"."api_token", + "posthog_team"."app_urls", + "posthog_team"."name", + "posthog_team"."slack_incoming_webhook", + "posthog_team"."created_at", + "posthog_team"."updated_at", + "posthog_team"."anonymize_ips", + "posthog_team"."completed_snippet_onboarding", + "posthog_team"."has_completed_onboarding_for", + "posthog_team"."ingested_event", + "posthog_team"."autocapture_opt_out", + "posthog_team"."autocapture_web_vitals_opt_in", + "posthog_team"."autocapture_web_vitals_allowed_metrics", + "posthog_team"."autocapture_exceptions_opt_in", + "posthog_team"."autocapture_exceptions_errors_to_ignore", + "posthog_team"."person_processing_opt_out", + "posthog_team"."session_recording_opt_in", + "posthog_team"."session_recording_sample_rate", + "posthog_team"."session_recording_minimum_duration_milliseconds", + "posthog_team"."session_recording_linked_flag", + "posthog_team"."session_recording_network_payload_capture_config", + "posthog_team"."session_recording_url_trigger_config", + "posthog_team"."session_recording_url_blocklist_config", + "posthog_team"."session_recording_event_trigger_config", + "posthog_team"."session_replay_config", + "posthog_team"."survey_config", + "posthog_team"."capture_console_log_opt_in", + "posthog_team"."capture_performance_opt_in", + "posthog_team"."capture_dead_clicks", + "posthog_team"."surveys_opt_in", + "posthog_team"."heatmaps_opt_in", + "posthog_team"."session_recording_version", + "posthog_team"."signup_token", + "posthog_team"."is_demo", + "posthog_team"."access_control", + "posthog_team"."week_start_day", + "posthog_team"."inject_web_apps", + "posthog_team"."test_account_filters", + "posthog_team"."test_account_filters_default_checked", + "posthog_team"."path_cleaning_filters", + "posthog_team"."timezone", + "posthog_team"."data_attributes", + "posthog_team"."person_display_name_properties", + "posthog_team"."live_events_columns", + "posthog_team"."recording_domains", + "posthog_team"."primary_dashboard_id", + "posthog_team"."extra_settings", + "posthog_team"."modifiers", + "posthog_team"."correlation_config", + "posthog_team"."session_recording_retention_period_days", + "posthog_team"."external_data_workspace_id", + "posthog_team"."external_data_workspace_last_synced_at" + FROM "posthog_team" + WHERE "posthog_team"."id" = 99999 + LIMIT 21 + ''' +# --- +# name: TestFeatureFlag.test_creating_static_cohort.9 ''' SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", @@ -2158,34 +2154,9 @@ "posthog_featureflag"."usage_dashboard_id", "posthog_featureflag"."has_enriched_analytics" FROM "posthog_featureflag" + INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id") WHERE ("posthog_featureflag"."key" = 'some-feature' - AND "posthog_featureflag"."team_id" = 99999) - LIMIT 21 - ''' -# --- -# name: TestFeatureFlag.test_creating_static_cohort.9 - ''' - SELECT "posthog_cohort"."id", - "posthog_cohort"."name", - "posthog_cohort"."description", - "posthog_cohort"."team_id", - "posthog_cohort"."deleted", - "posthog_cohort"."filters", - "posthog_cohort"."query", - "posthog_cohort"."version", - "posthog_cohort"."pending_version", - "posthog_cohort"."count", - "posthog_cohort"."created_by_id", - "posthog_cohort"."created_at", - "posthog_cohort"."is_calculating", - "posthog_cohort"."last_calculation", - "posthog_cohort"."errors_calculating", - "posthog_cohort"."last_error_at", - "posthog_cohort"."is_static", - "posthog_cohort"."groups" - FROM "posthog_cohort" - WHERE ("posthog_cohort"."id" = 99999 - AND 
"posthog_cohort"."team_id" = 99999) + AND "posthog_team"."project_id" = 99999) LIMIT 21 ''' # --- diff --git a/posthog/api/test/test_cohort.py b/posthog/api/test/test_cohort.py index 5e16d6b7bc519..8a32a8b9a5dd0 100644 --- a/posthog/api/test/test_cohort.py +++ b/posthog/api/test/test_cohort.py @@ -1111,7 +1111,7 @@ def _calc(query: str) -> int: self.assertEqual(1, _calc("select 1 from events")) # raises on all other cases - response = self.client.post( + query_post_response = self.client.post( f"/api/projects/{self.team.id}/cohorts", data={ "name": "cohort A", @@ -1122,7 +1122,15 @@ def _calc(query: str) -> int: }, }, ) - self.assertEqual(response.status_code, 500, response.content) + query_get_response = self.client.get( + f"/api/projects/{self.team.id}/cohorts/{query_post_response.json()['id']}/" + ) + + self.assertEqual(query_post_response.status_code, 201) + self.assertEqual(query_get_response.status_code, 200) + self.assertEqual( + query_get_response.json()["errors_calculating"], 1 + ) # Should be because selecting from groups is not allowed @patch("posthog.api.cohort.report_user_action") def test_cohort_with_is_set_filter_missing_value(self, patch_capture): diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index 4fd4183c1ac38..7ffb834c41e4a 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -2229,7 +2229,7 @@ def test_local_evaluation_for_invalid_cohorts(self, mock_capture): self.client.logout() - with self.assertNumQueries(16): + with self.assertNumQueries(18): # 1. SAVEPOINT # 2. SELECT "posthog_personalapikey"."id", # 3. RELEASE SAVEPOINT @@ -2242,10 +2242,12 @@ def test_local_evaluation_for_invalid_cohorts(self, mock_capture): # 10. SELECT "posthog_organizationmembership"."id", # 11. SELECT "posthog_cohort"."id" -- all cohorts # 12. SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", -- all flags - # 13. SELECT "posthog_cohort". id = 99999 - # 14. SELECT "posthog_cohort". id = deleted cohort - # 15. SELECT "posthog_cohort". id = cohort from other team - # 16. SELECT "posthog_grouptypemapping"."id", -- group type mapping + # 13. SELECT "posthog_team"."id", "posthog_team"."uuid", + # 14. SELECT "posthog_cohort". id = 99999 + # 15. SELECT "posthog_team"."id", "posthog_team"."uuid", + # 16. SELECT "posthog_cohort". id = deleted cohort + # 17. SELECT "posthog_cohort". id = cohort from other team + # 18. 
SELECT "posthog_grouptypemapping"."id", -- group type mapping response = self.client.get( f"/api/feature_flag/local_evaluation?token={self.team.api_token}&send_cohorts", @@ -4230,7 +4232,7 @@ def test_creating_static_cohort_with_deleted_flag(self): name="some cohort", ) - with self.assertNumQueries(1): + with self.assertNumQueries(2): get_cohort_actors_for_feature_flag(cohort.pk, "some-feature", self.team.pk) cohort.refresh_from_db() @@ -4268,7 +4270,7 @@ def test_creating_static_cohort_with_inactive_flag(self): name="some cohort", ) - with self.assertNumQueries(1): + with self.assertNumQueries(2): get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk) cohort.refresh_from_db() @@ -4307,7 +4309,7 @@ def test_creating_static_cohort_with_group_flag(self): name="some cohort", ) - with self.assertNumQueries(1): + with self.assertNumQueries(2): get_cohort_actors_for_feature_flag(cohort.pk, "some-feature3", self.team.pk) cohort.refresh_from_db() @@ -4339,7 +4341,7 @@ def test_creating_static_cohort_with_no_person_distinct_ids(self): name="some cohort", ) - with self.assertNumQueries(5): + with self.assertNumQueries(6): get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk) cohort.refresh_from_db() @@ -4357,7 +4359,7 @@ def test_creating_static_cohort_with_non_existing_flag(self): name="some cohort", ) - with self.assertNumQueries(1): + with self.assertNumQueries(2): get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk) cohort.refresh_from_db() diff --git a/posthog/api/test/test_organization.py b/posthog/api/test/test_organization.py index 2396f78e3c557..143fbe3f524b9 100644 --- a/posthog/api/test/test_organization.py +++ b/posthog/api/test/test_organization.py @@ -1,4 +1,5 @@ from rest_framework import status +from unittest.mock import patch, ANY from posthog.models import Organization, OrganizationMembership, Team from posthog.models.personal_api_key import PersonalAPIKey, hash_key_value @@ -128,7 +129,8 @@ def test_cant_update_plugins_access_level(self): self.organization.refresh_from_db() self.assertEqual(self.organization.plugins_access_level, 3) - def test_enforce_2fa_for_everyone(self): + @patch("posthoganalytics.capture") + def test_enforce_2fa_for_everyone(self, mock_capture): # Only admins should be able to enforce 2fa response = self.client.patch(f"/api/organizations/{self.organization.id}/", {"enforce_2fa": True}) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -142,6 +144,19 @@ def test_enforce_2fa_for_everyone(self): self.organization.refresh_from_db() self.assertEqual(self.organization.enforce_2fa, True) + # Verify the capture event was called correctly + mock_capture.assert_any_call( + self.user.distinct_id, + "organization 2fa enforcement toggled", + properties={ + "enabled": True, + "organization_id": str(self.organization.id), + "organization_name": self.organization.name, + "user_role": OrganizationMembership.Level.ADMIN, + }, + groups={"instance": ANY, "organization": str(self.organization.id)}, + ) + def test_projects_outside_personal_api_key_scoped_organizations_not_listed(self): other_org, _, _ = Organization.objects.bootstrap(self.user) personal_api_key = generate_random_token_personal() diff --git a/posthog/api/test/test_person.py b/posthog/api/test/test_person.py index 2c9694f6eda6d..29eb3990407d5 100644 --- a/posthog/api/test/test_person.py +++ b/posthog/api/test/test_person.py @@ -873,7 +873,7 @@ def test_pagination_limit(self): create_person(team_id=self.team.pk, version=0) 
returned_ids = [] - with self.assertNumQueries(10): + with self.assertNumQueries(9): response = self.client.get("/api/person/?limit=10").json() self.assertEqual(len(response["results"]), 9) returned_ids += [x["distinct_ids"][0] for x in response["results"]] diff --git a/posthog/clickhouse/cluster.py b/posthog/clickhouse/cluster.py index 3aa67c94ff3b5..75c91db9da75f 100644 --- a/posthog/clickhouse/cluster.py +++ b/posthog/clickhouse/cluster.py @@ -52,7 +52,9 @@ def result( class ConnectionInfo(NamedTuple): address: str - port: int + + def make_pool(self) -> ChPool: + return make_ch_pool(host=self.address) class HostInfo(NamedTuple): @@ -67,10 +69,10 @@ class HostInfo(NamedTuple): class ClickhouseCluster: def __init__(self, bootstrap_client: Client, extra_hosts: Sequence[ConnectionInfo] | None = None) -> None: self.__hosts = [ - HostInfo(ConnectionInfo(host_address, port), shard_num, replica_num) - for (host_address, port, shard_num, replica_num) in bootstrap_client.execute( + HostInfo(ConnectionInfo(host_address), shard_num, replica_num) + for (host_address, shard_num, replica_num) in bootstrap_client.execute( """ - SELECT host_address, port, shard_num, replica_num + SELECT host_address, shard_num, replica_num FROM system.clusters WHERE name = %(name)s ORDER BY shard_num, replica_num @@ -87,7 +89,7 @@ def __init__(self, bootstrap_client: Client, extra_hosts: Sequence[ConnectionInf def __get_task_function(self, host: HostInfo, fn: Callable[[Client], T]) -> Callable[[], T]: pool = self.__pools.get(host) if pool is None: - pool = self.__pools[host] = make_ch_pool(host=host.connection_info.address, port=host.connection_info.port) + pool = self.__pools[host] = host.connection_info.make_pool() def task(): with pool.get_client() as client: diff --git a/posthog/clickhouse/materialized_columns.py b/posthog/clickhouse/materialized_columns.py index 2ff858274ab4d..09b2d8b24c6dc 100644 --- a/posthog/clickhouse/materialized_columns.py +++ b/posthog/clickhouse/materialized_columns.py @@ -1,6 +1,6 @@ -from datetime import timedelta +from typing import Protocol -from posthog.cache_utils import cache_for +from posthog.models.instance_setting import get_instance_setting from posthog.models.property import PropertyName, TableColumn, TableWithProperties from posthog.settings import EE_AVAILABLE @@ -8,19 +8,25 @@ ColumnName = str TablesWithMaterializedColumns = TableWithProperties + +class MaterializedColumn(Protocol): + name: ColumnName + is_nullable: bool + + if EE_AVAILABLE: - from ee.clickhouse.materialized_columns.columns import get_materialized_columns -else: + from ee.clickhouse.materialized_columns.columns import get_enabled_materialized_columns - def get_materialized_columns( - table: TablesWithMaterializedColumns, - exclude_disabled_columns: bool = False, - ) -> dict[tuple[PropertyName, TableColumn], ColumnName]: - return {} + def get_materialized_column_for_property( + table: TablesWithMaterializedColumns, table_column: TableColumn, property_name: PropertyName + ) -> MaterializedColumn | None: + if not get_instance_setting("MATERIALIZED_COLUMNS_ENABLED"): + return None + return get_enabled_materialized_columns(table).get((property_name, table_column)) +else: -@cache_for(timedelta(minutes=15)) -def get_enabled_materialized_columns( - table: TablesWithMaterializedColumns, -) -> dict[tuple[PropertyName, TableColumn], ColumnName]: - return get_materialized_columns(table, exclude_disabled_columns=True) + def get_materialized_column_for_property( + table: TablesWithMaterializedColumns, table_column: 
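A minimal sketch of the reworked connection handling in posthog/clickhouse/cluster.py above, assuming the make_ch_pool wiring shown in the hunk; the host address is illustrative:

    from posthog.clickhouse.cluster import ConnectionInfo

    # Each host now builds its own pool from just an address; the port column
    # is no longer read from system.clusters.
    conn = ConnectionInfo("10.0.0.1")
    pool = conn.make_pool()  # delegates to make_ch_pool(host=conn.address)
    with pool.get_client() as client:
        client.execute("SELECT 1")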
TableColumn, property_name: PropertyName + ) -> MaterializedColumn | None: + return None diff --git a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py index 7e7847c570bac..1be2a1c033c66 100644 --- a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py +++ b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py @@ -1,6 +1,6 @@ from infi.clickhouse_orm import migrations -from posthog.clickhouse.materialized_columns import get_materialized_columns +from posthog.clickhouse.materialized_columns import get_materialized_column_for_property from posthog.client import sync_execute from posthog.settings import CLICKHOUSE_CLUSTER @@ -45,9 +45,9 @@ def materialize_session_and_window_id(database): properties = ["$session_id", "$window_id"] for property_name in properties: - materialized_columns = get_materialized_columns("events") + current_materialized_column = get_materialized_column_for_property("events", "properties", property_name) # If the column is not materialized, materialize it - if (property_name, "properties") not in materialized_columns: + if current_materialized_column is None: materialize("events", property_name, property_name) # Now, we need to clean up any potential inconsistencies with existing column names @@ -71,9 +71,8 @@ def materialize_session_and_window_id(database): # materialized the column or renamed the column, and then ran the 0004_... async migration # before this migration runs. possible_old_column_names = {"mat_" + property_name} - current_materialized_column_name = materialized_columns.get((property_name, "properties"), None) - if current_materialized_column_name is not None and current_materialized_column_name != property_name: - possible_old_column_names.add(current_materialized_column_name) + if current_materialized_column is not None and current_materialized_column.name != property_name: + possible_old_column_names.add(current_materialized_column.name) for possible_old_column_name in possible_old_column_names: ensure_only_new_column_exists(database, "sharded_events", possible_old_column_name, property_name) diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 37fea932f2014..418e2f6354807 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -6,7 +6,11 @@ from typing import Literal, Optional, Union, cast from uuid import UUID -from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns +from posthog.clickhouse.materialized_columns import ( + MaterializedColumn, + TablesWithMaterializedColumns, + get_materialized_column_for_property, +) from posthog.clickhouse.property_groups import property_groups from posthog.hogql import ast from posthog.hogql.base import AST, _T_AST @@ -197,6 +201,7 @@ class JoinExprResponse: class PrintableMaterializedColumn: table: Optional[str] column: str + is_nullable: bool def __str__(self) -> str: if self.table is None: @@ -1321,10 +1326,11 @@ def __get_all_materialized_property_sources( field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name) materialized_column = self._get_materialized_column(table_name, property_name, field_name) - if materialized_column: + if materialized_column is not None: yield PrintableMaterializedColumn( self.visit(field_type.table_type), - self._print_identifier(materialized_column), + self._print_identifier(materialized_column.name), +
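A minimal usage sketch of the new lookup API, mirroring the migration hunk above; the property name is illustrative:

    from posthog.clickhouse.materialized_columns import get_materialized_column_for_property

    # Returns an object satisfying the MaterializedColumn protocol (.name,
    # .is_nullable), or None when the column is missing, disabled, or EE is
    # unavailable.
    column = get_materialized_column_for_property("events", "properties", "$session_id")
    if column is not None and column.name != "$session_id":
        print(f"materialized as {column.name} (nullable={column.is_nullable})")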
is_nullable=materialized_column.is_nullable, ) if self.context.modifiers.propertyGroupsMode in ( @@ -1352,8 +1358,12 @@ def __get_all_materialized_property_sources( materialized_column = self._get_materialized_column("events", property_name, "person_properties") else: materialized_column = self._get_materialized_column("person", property_name, "properties") - if materialized_column: - yield PrintableMaterializedColumn(None, self._print_identifier(materialized_column)) + if materialized_column is not None: + yield PrintableMaterializedColumn( + None, + self._print_identifier(materialized_column.name), + is_nullable=materialized_column.is_nullable, + ) def visit_property_type(self, type: ast.PropertyType): if type.joined_subquery is not None and type.joined_subquery_field_name is not None: @@ -1361,7 +1371,10 @@ def visit_property_type(self, type: ast.PropertyType): materialized_property_source = self.__get_materialized_property_source_for_property_type(type) if materialized_property_source is not None: - if isinstance(materialized_property_source, PrintableMaterializedColumn): + if ( + isinstance(materialized_property_source, PrintableMaterializedColumn) + and not materialized_property_source.is_nullable + ): # TODO: rematerialize all columns to properly support empty strings and "null" string values. if self.context.modifiers.materializationMode == MaterializationMode.LEGACY_NULL_AS_STRING: materialized_property_sql = f"nullIf({materialized_property_source}, '')" @@ -1511,9 +1524,10 @@ def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: list[ def _get_materialized_column( self, table_name: str, property_name: PropertyName, field_name: TableColumn - ) -> Optional[str]: - materialized_columns = get_enabled_materialized_columns(cast(TablesWithMaterializedColumns, table_name)) - return materialized_columns.get((property_name, field_name), None) + ) -> MaterializedColumn | None: + return get_materialized_column_for_property( + cast(TablesWithMaterializedColumns, table_name), field_name, property_name + ) def _get_timezone(self) -> str: return self.context.database.get_timezone() if self.context.database else "UTC" diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 8d7dad46040ac..4f2422263d0c8 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -460,14 +460,22 @@ def test_hogql_properties_materialized_json_access(self): self.assertEqual(1 + 2, 3) return - materialize("events", "withmat") context = HogQLContext(team_id=self.team.pk) + materialize("events", "withmat") self.assertEqual( self._expr("properties.withmat.json.yet", context), "replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(nullIf(nullIf(events.mat_withmat, ''), 'null'), %(hogql_val_0)s, %(hogql_val_1)s), ''), 'null'), '^\"|\"$', '')", ) self.assertEqual(context.values, {"hogql_val_0": "json", "hogql_val_1": "yet"}) + context = HogQLContext(team_id=self.team.pk) + materialize("events", "withmat_nullable", is_nullable=True) + self.assertEqual( + self._expr("properties.withmat_nullable.json.yet", context), + "replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.mat_withmat_nullable, %(hogql_val_0)s, %(hogql_val_1)s), ''), 'null'), '^\"|\"$', '')", + ) + self.assertEqual(context.values, {"hogql_val_0": "json", "hogql_val_1": "yet"}) + def test_materialized_fields_and_properties(self): try: from ee.clickhouse.materialized_columns.analyze import materialize @@ -499,6 +507,12 @@ def test_materialized_fields_and_properties(self): 
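The withmat/withmat_nullable tests above pin down the two printing modes; a plain-Python sketch of the rule the printer applies (the helper is hypothetical, not part of this diff):

    def printed_column(identifier: str, is_nullable: bool) -> str:
        # Nullable materialized columns already represent missing values as
        # NULL, so the printer reads them directly.
        if is_nullable:
            return identifier
        # Legacy columns store '' and 'null' strings; both are coerced to NULL.
        return f"nullIf(nullIf({identifier}, ''), 'null')"

    assert printed_column("events.mat_withmat", False) == "nullIf(nullIf(events.mat_withmat, ''), 'null')"
    assert printed_column("events.mat_withmat_nullable", True) == "events.mat_withmat_nullable"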
"nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')", ) + materialize("events", "nullable_property", is_nullable=True) + self.assertEqual( + self._expr("properties['nullable_property']"), + "events.mat_nullable_property", + ) + def test_property_groups(self): context = HogQLContext( team_id=self.team.pk, diff --git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py index 6dbac74590da6..e561607629f1f 100644 --- a/posthog/hogql/transforms/property_types.py +++ b/posthog/hogql/transforms/property_types.py @@ -1,6 +1,10 @@ -from typing import Literal, Optional, cast +from typing import Literal, cast -from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns +from posthog.clickhouse.materialized_columns import ( + MaterializedColumn, + TablesWithMaterializedColumns, + get_materialized_column_for_property, +) from posthog.hogql import ast from posthog.hogql.context import HogQLContext from posthog.hogql.database.models import ( @@ -258,7 +262,7 @@ def _add_property_notice( message = f"{property_type.capitalize()} property '{property_name}' is of type '{field_type}'." if self.context.debug: - if materialized_column: + if materialized_column is not None: message += " This property is materialized ⚡️." else: message += " This property is not materialized 🐢." @@ -277,6 +281,7 @@ def _add_notice(self, node: ast.Field, message: str): def _get_materialized_column( self, table_name: str, property_name: PropertyName, field_name: TableColumn - ) -> Optional[str]: - materialized_columns = get_enabled_materialized_columns(cast(TablesWithMaterializedColumns, table_name)) - return materialized_columns.get((property_name, field_name), None) + ) -> MaterializedColumn | None: + return get_materialized_column_for_property( + cast(TablesWithMaterializedColumns, table_name), field_name, property_name + ) diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py index cec4b7019f212..cde5bd2d6311f 100644 --- a/posthog/hogql_queries/actors_query_runner.py +++ b/posthog/hogql_queries/actors_query_runner.py @@ -3,7 +3,7 @@ from collections.abc import Sequence, Iterator from posthog.hogql import ast -from posthog.hogql.constants import HogQLGlobalSettings +from posthog.hogql.constants import HogQLGlobalSettings, HogQLQuerySettings from posthog.hogql.parser import parse_expr, parse_order_expr from posthog.hogql.property import has_aggregation from posthog.hogql.resolver_utils import extract_select_queries @@ -307,6 +307,7 @@ def to_query(self) -> ast.SelectQuery: having=having, group_by=group_by if has_any_aggregation else None, order_by=order_by, + settings=HogQLQuerySettings(join_algorithm="auto", optimize_aggregation_in_order=True), ) def to_actors_query(self) -> ast.SelectQuery: diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr index 2315f2b51ebf6..2f2933fb62433 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr @@ -193,7 +193,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at 
DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -612,7 +614,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -730,7 +734,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -848,7 +854,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1848,7 +1856,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1989,7 +1999,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2130,7 +2142,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, 
allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2271,7 +2285,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr index f95e83e21b1d9..4573056cf6cac 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr @@ -482,7 +482,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -673,7 +675,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -864,7 +868,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1055,7 +1061,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1408,7 +1416,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON 
equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1599,7 +1609,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1790,7 +1802,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1981,7 +1995,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2520,7 +2536,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2635,7 +2653,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2750,7 +2770,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2865,7 +2887,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3128,7 +3152,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + 
join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3243,7 +3269,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3490,7 +3518,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3605,7 +3635,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3720,7 +3752,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3835,7 +3869,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4098,7 +4134,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4213,7 +4251,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4482,7 +4522,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4604,7 +4646,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4726,7 +4770,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, 
format_csv_allow_double_quotes=0, @@ -4848,7 +4894,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5264,7 +5312,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5386,7 +5436,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5508,7 +5560,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5630,7 +5684,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6046,7 +6102,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6168,7 +6226,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6290,7 +6350,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6412,7 +6474,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6828,7 +6892,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -6950,7 +7016,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) 
ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7072,7 +7140,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7194,7 +7264,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7610,7 +7682,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7732,7 +7806,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7854,7 +7930,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -7976,7 +8054,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr index ca6d26d135828..ea2c02c121f49 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr @@ -163,7 +163,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -428,7 +430,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS 
optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -619,7 +623,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr index dcec437b05683..f1f604cc85b02 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr @@ -107,7 +107,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -239,7 +241,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -374,7 +378,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr index 0912fa7845d36..71680063ab927 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr @@ -358,7 +358,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), 
ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -493,7 +495,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -628,7 +632,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -763,7 +769,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1026,7 +1034,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1161,7 +1171,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1296,7 +1308,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, 
source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1431,7 +1445,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1820,7 +1836,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1887,7 +1905,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1954,7 +1974,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2021,7 +2043,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2202,7 +2226,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2269,7 +2295,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2434,7 +2462,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2501,7 +2531,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, 
format_csv_allow_double_quotes=0, @@ -2568,7 +2600,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2635,7 +2669,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2816,7 +2852,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2883,7 +2921,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3070,7 +3110,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3144,7 +3186,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3218,7 +3262,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3292,7 +3338,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3592,7 +3640,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3666,7 +3716,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3740,7 +3792,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) 
ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3814,7 +3868,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4114,7 +4170,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4188,7 +4246,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4262,7 +4322,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4336,7 +4398,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4636,7 +4700,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4710,7 +4776,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4784,7 +4852,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4858,7 +4928,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5158,7 +5230,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, 
+ join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5232,7 +5306,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5306,7 +5382,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5380,7 +5458,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr index f623ea36204cd..d2d6bbab5f69f 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr @@ -162,7 +162,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -349,7 +351,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -536,7 +540,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr index 9fbd6af6c74ed..38542d31104b9 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr 
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr @@ -53,7 +53,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -131,7 +133,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -209,7 +213,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr index 163abb23ac305..d06597a0b35da 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr @@ -754,7 +754,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -872,7 +874,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -990,7 +994,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON 
equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1108,7 +1114,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr index f2e6752d368d0..651f296097a7b 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr @@ -122,7 +122,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -269,7 +271,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -416,7 +420,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr index 846e534decf6a..3a0a96ffa7162 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr @@ -53,7 +53,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), 
person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -131,7 +133,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -209,7 +213,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr index f0bbdae5329d3..21eb841990a53 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr @@ -554,7 +554,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -637,7 +639,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -720,7 +724,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, 
allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -803,7 +809,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr index e735153b628c3..5eafb6901598c 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr @@ -148,7 +148,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -321,7 +323,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -494,7 +498,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr index 6e86eda210324..36d25b420b5ee 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr @@ -51,7 +51,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS 
optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -127,7 +129,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -203,7 +207,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr index cb6d8db14b8ce..545e7fa4d506c 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr @@ -115,7 +115,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -419,7 +421,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -491,7 +495,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -563,7 +569,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), 
toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.created_at DESC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1258,7 +1266,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1341,7 +1351,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1424,7 +1436,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1507,7 +1521,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr index 3f5e9e4467e64..c52ab6eb60b8d 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr @@ -1471,7 +1471,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, 
format_csv_allow_double_quotes=0, @@ -2310,7 +2312,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2572,7 +2576,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2834,7 +2840,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr index a4bfbc566ff43..4315f4b9bba92 100644 --- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr +++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr @@ -266,7 +266,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr index b8db8df7c3613..b2a235c2308e0 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr @@ -66,7 +66,8 @@ WHERE equals(person.team_id, 99999) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.properties___name ASC) 
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -154,7 +155,8 @@ and isNull(toStartOfDay(parseDateTime64BestEffortOrNull('2020-01-12', 6, 'US/Pacific')))), ifNull(equals(status, 'returning'), 0))) AS source))) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.properties___name ASC) + ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -192,7 +194,8 @@ WHERE equals(groups.team_id, 99999) GROUP BY groups.group_type_index, groups.group_key) AS groups ON equals(groups.key, source.group_key) - ORDER BY groups.properties___name ASC) + ORDER BY groups.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -258,7 +261,8 @@ WHERE ifNull(equals(num_intervals, 2), 0)) AS source))) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.properties___name ASC) + ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -291,7 +295,8 @@ WHERE equals(groups.team_id, 99999) GROUP BY groups.group_type_index, groups.group_key) AS groups ON equals(groups.key, source.actor_id) - ORDER BY groups.properties___name ASC) + ORDER BY groups.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -357,7 +362,8 @@ GROUP BY actor_id) AS source))) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.properties___name ASC) + ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1, @@ -432,7 +438,8 @@ GROUP BY actor_id) AS source))) GROUP BY person.id HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) - ORDER BY persons.properties___name ASC) + ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto') LIMIT 100 SETTINGS readonly=2, max_execution_time=60, 
allow_experimental_object_type=1, diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr index 20f2012034bba..905fb297fe4a9 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr @@ -1301,7 +1301,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1448,7 +1450,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1595,7 +1599,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1744,7 +1750,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -1891,7 +1899,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2038,7 +2048,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY 
persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2187,7 +2199,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2334,7 +2348,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2481,7 +2497,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2723,7 +2741,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -2890,7 +2910,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3043,7 +3065,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, 
allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3208,7 +3232,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3383,7 +3409,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3723,7 +3751,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -3967,7 +3997,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4211,7 +4243,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4455,7 +4489,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4849,7 +4885,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), 
ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -4996,7 +5034,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5225,7 +5265,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5454,7 +5496,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5601,7 +5645,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -5748,7 +5794,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13155,7 +13203,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, 
source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13302,7 +13352,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13449,7 +13501,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13598,7 +13652,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13745,7 +13801,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -13892,7 +13950,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14041,7 +14101,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, 
max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14188,7 +14250,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14335,7 +14399,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14577,7 +14643,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14744,7 +14812,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -14897,7 +14967,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -15062,7 +15134,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -15237,7 +15311,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 
0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -15577,7 +15653,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -15821,7 +15899,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -16065,7 +16145,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -16309,7 +16391,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -16703,7 +16787,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -16850,7 +16936,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON 
equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -17079,7 +17167,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -17308,7 +17398,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -17455,7 +17547,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -17602,7 +17696,9 @@ HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id) ORDER BY persons.id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr index d726c177939a3..477e07b03d8f9 100644 --- a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr +++ b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr @@ -63,7 +63,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY length(source.appearances) DESC, source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + join_algorithm='auto', + readonly=2, max_execution_time=60, allow_experimental_object_type=1, format_csv_allow_double_quotes=0, @@ -164,7 +166,9 @@ groups.group_key) AS groups ON equals(groups.key, source.actor_id) ORDER BY length(source.appearances) DESC, source.actor_id ASC LIMIT 101 - OFFSET 0 SETTINGS readonly=2, + OFFSET 0 SETTINGS optimize_aggregation_in_order=1, + 
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index 4b096b060262e..9bd3a90b8d559 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -42,11 +42,14 @@
 # name: TestTrends.test_action_filtering_with_cohort.4
   '''
   /* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 2
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestTrends.test_action_filtering_with_cohort.5
@@ -138,11 +141,14 @@
 # name: TestTrends.test_action_filtering_with_cohort_poe_v2.4
   '''
   /* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 2
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestTrends.test_action_filtering_with_cohort_poe_v2.5
@@ -278,7 +284,9 @@
                HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
       ORDER BY source.event_count DESC
       LIMIT 101
-      OFFSET 0 SETTINGS readonly=2,
+      OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+                        join_algorithm='auto',
+                        readonly=2,
                         max_execution_time=60,
                         allow_experimental_object_type=1,
                         format_csv_allow_double_quotes=0,
@@ -1422,7 +1430,9 @@
                HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
       ORDER BY source.event_count DESC
       LIMIT 101
-      OFFSET 0 SETTINGS readonly=2,
+      OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+                        join_algorithm='auto',
+                        readonly=2,
                         max_execution_time=60,
                         allow_experimental_object_type=1,
                         format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py
index 668cd8b2afb48..c7de458195b2f 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py
@@ -219,7 +219,10 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse:
             for value in self.query.breakdownFilter.breakdown:
                 if value != "all" and str(value) != "0":
                     res_breakdown.append(
-                        BreakdownItem(label=Cohort.objects.get(pk=int(value), team=self.team).name, value=value)
+                        BreakdownItem(
+                            label=Cohort.objects.get(pk=int(value), team__project_id=self.team.project_id).name,
+                            value=value,
+                        )
                     )
                 else:
                     res_breakdown.append(BreakdownItem(label="all users", value="all"))
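# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The trends_query_runner.py hunk above widens the cohort lookup from a single
# team to every team in the project (`team__project_id=...`). A minimal
# stand-in for that resolution logic, using plain dicts instead of the Django
# ORM; the registry and function names below are hypothetical.

from typing import NamedTuple, Union


class BreakdownItem(NamedTuple):  # simplified stand-in for the real schema class
    label: str
    value: Union[str, int]


# toy registry keyed by (project_id, cohort_pk) rather than (team_id, cohort_pk)
COHORTS = {(1, 42): "Power users"}


def resolve_breakdown_labels(breakdown_values: list, project_id: int) -> list[BreakdownItem]:
    items = []
    for value in breakdown_values:
        if value != "all" and str(value) != "0":
            # project-scoped lookup: any environment (team) of the project matches
            items.append(BreakdownItem(label=COHORTS[(project_id, int(value))], value=value))
        else:
            items.append(BreakdownItem(label="all users", value="all"))
    return items


assert resolve_breakdown_labels(["all", 42], project_id=1)[1].label == "Power users"
# --- end sketch ---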
diff --git a/posthog/hogql_queries/test/test_actors_query_runner.py b/posthog/hogql_queries/test/test_actors_query_runner.py
index 904c1adad8d9f..36a12166cb589 100644
--- a/posthog/hogql_queries/test/test_actors_query_runner.py
+++ b/posthog/hogql_queries/test/test_actors_query_runner.py
@@ -1,3 +1,5 @@
+from typing import cast
+
 import pytest
 
 from posthog.hogql import ast
@@ -66,7 +68,7 @@ def test_default_persons_query(self):
         runner = self._create_runner(ActorsQuery())
 
         query = runner.to_query()
-        query = clear_locations(query)
+        query = cast(ast.SelectQuery, clear_locations(query))
         expected = ast.SelectQuery(
             select=[
                 ast.Field(chain=["id"]),
@@ -78,7 +80,8 @@ def test_default_persons_query(self):
             where=None,
             order_by=[ast.OrderExpr(expr=ast.Field(chain=["created_at"]), order="DESC")],
         )
-        assert clear_locations(query) == expected
+        query.settings = None
+        assert query == expected
 
         response = runner.calculate()
         assert len(response.results) == 10
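# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The test change above nulls out `query.settings` before comparing ASTs,
# because the runner now attaches default SETTINGS that the hand-built
# `expected` tree does not carry. The same normalize-then-compare pattern,
# reduced to a self-contained dataclass example with hypothetical names:

from dataclasses import dataclass
from typing import Optional


@dataclass
class SelectQuery:  # stand-in for ast.SelectQuery; dataclasses compare by value
    select: list[str]
    settings: Optional[dict] = None


actual = SelectQuery(select=["id"], settings={"optimize_aggregation_in_order": 1})
expected = SelectQuery(select=["id"])

actual.settings = None  # normalize the one field the assertion does not care about
assert actual == expected
# --- end sketch ---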
+ """ from posthog.models.cohort.util import get_static_cohort_size, insert_static_cohort try: @@ -321,13 +337,13 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool for i in range(0, len(items), batchsize): batch = items[i : i + batchsize] persons_query = ( - Person.objects.filter(team_id=self.team_id).filter(uuid__in=batch).exclude(cohort__id=self.id) + Person.objects.filter(team_id=team_id).filter(uuid__in=batch).exclude(cohort__id=self.id) ) if insert_in_clickhouse: insert_static_cohort( list(persons_query.values_list("uuid", flat=True)), self.pk, - self.team, + team_id=team_id, ) sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params() query = UPDATE_QUERY.format( @@ -340,7 +356,7 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool ) cursor.execute(query, params) - count = get_static_cohort_size(self) + count = get_static_cohort_size(cohort_id=self.id, team_id=self.team_id) self.count = count self.is_calculating = False @@ -357,12 +373,6 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool self.save() capture_exception(err) - def _clickhouse_persons_query(self, batch_size=10000, offset=0): - from posthog.models.cohort.util import get_person_ids_by_cohort_id - - uuids = get_person_ids_by_cohort_id(team=self.team, cohort_id=self.pk, limit=batch_size, offset=offset) - return Person.objects.filter(uuid__in=uuids, team=self.team) - __repr__ = sane_repr("id", "name", "last_calculation") diff --git a/posthog/models/cohort/sql.py b/posthog/models/cohort/sql.py index a84394bae94a8..603f8addf08a2 100644 --- a/posthog/models/cohort/sql.py +++ b/posthog/models/cohort/sql.py @@ -91,6 +91,8 @@ """ STALE_COHORTPEOPLE = f""" -SELECT count() FROM cohortpeople -WHERE team_id = %(team_id)s AND cohort_id = %(cohort_id)s AND version < %(version)s +SELECT team_id, count() AS stale_people_count FROM cohortpeople +WHERE team_id IN %(team_ids)s AND cohort_id = %(cohort_id)s AND version < %(version)s +GROUP BY team_id +HAVING stale_people_count > 0 """ diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index fe589236fa62e..395085453c5e3 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -34,13 +34,10 @@ STALE_COHORTPEOPLE, ) from posthog.models.person.sql import ( - GET_LATEST_PERSON_SQL, - GET_PERSON_IDS_BY_FILTER, INSERT_PERSON_STATIC_COHORT, PERSON_STATIC_COHORT_TABLE, ) from posthog.models.property import Property, PropertyGroup -from posthog.queries.insight import insight_sync_execute from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query # temporary marker to denote when cohortpeople table started being populated @@ -75,14 +72,14 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) return query, params -def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str: +def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext, *, team: Team) -> str: from posthog.hogql_queries.query_runner import get_query_runner if not cohort.query: raise ValueError("Cohort has no query") query = get_query_runner( - cast(dict, cohort.query), team=cast(Team, cohort.team), limit_context=LimitContext.COHORT_CALCULATION + cast(dict, cohort.query), team=team, limit_context=LimitContext.COHORT_CALCULATION ).to_query() for select_query in extract_select_queries(query): @@ -109,7 +106,7 @@ def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str 
diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py
index fe589236fa62e..395085453c5e3 100644
--- a/posthog/models/cohort/util.py
+++ b/posthog/models/cohort/util.py
@@ -34,13 +34,10 @@
     STALE_COHORTPEOPLE,
 )
 from posthog.models.person.sql import (
-    GET_LATEST_PERSON_SQL,
-    GET_PERSON_IDS_BY_FILTER,
     INSERT_PERSON_STATIC_COHORT,
     PERSON_STATIC_COHORT_TABLE,
 )
 from posthog.models.property import Property, PropertyGroup
-from posthog.queries.insight import insight_sync_execute
 from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
 
 # temporary marker to denote when cohortpeople table started being populated
@@ -75,14 +72,14 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext)
     return query, params
 
 
-def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str:
+def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext, *, team: Team) -> str:
     from posthog.hogql_queries.query_runner import get_query_runner
 
     if not cohort.query:
         raise ValueError("Cohort has no query")
 
     query = get_query_runner(
-        cast(dict, cohort.query), team=cast(Team, cohort.team), limit_context=LimitContext.COHORT_CALCULATION
+        cast(dict, cohort.query), team=team, limit_context=LimitContext.COHORT_CALCULATION
     ).to_query()
 
     for select_query in extract_select_queries(query):
@@ -109,7 +106,7 @@ def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str
     hogql_context.enable_select_queries = True
     hogql_context.limit_top_select = False
-    create_default_modifiers_for_team(cohort.team, hogql_context.modifiers)
+    create_default_modifiers_for_team(team, hogql_context.modifiers)
     return print_ast(query, context=hogql_context, dialect="clickhouse")
@@ -262,10 +259,7 @@ def format_filter_query(
 
 
 def format_cohort_subquery(
-    cohort: Cohort,
-    index: int,
-    hogql_context: HogQLContext,
-    custom_match_field="person_id",
+    cohort: Cohort, index: int, hogql_context: HogQLContext, custom_match_field="person_id"
 ) -> tuple[str, dict[str, Any]]:
     is_precalculated = is_precalculated_query(cohort)
     if is_precalculated:
@@ -277,46 +271,13 @@ def format_cohort_subquery(
     return person_query, params
 
 
-def get_person_ids_by_cohort_id(
-    team: Team,
-    cohort_id: int,
-    limit: Optional[int] = None,
-    offset: Optional[int] = None,
-):
-    from posthog.models.property.util import parse_prop_grouped_clauses
-
-    filter = Filter(data={"properties": [{"key": "id", "value": cohort_id, "type": "cohort"}]})
-    filter_query, filter_params = parse_prop_grouped_clauses(
-        team_id=team.pk,
-        property_group=filter.property_groups,
-        table_name="pdi",
-        hogql_context=filter.hogql_context,
-    )
-
-    results = insight_sync_execute(
-        GET_PERSON_IDS_BY_FILTER.format(
-            person_query=GET_LATEST_PERSON_SQL,
-            distinct_query=filter_query,
-            query="",
-            GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team.pk),
-            offset="OFFSET %(offset)s" if offset else "",
-            limit="ORDER BY _timestamp ASC LIMIT %(limit)s" if limit else "",
-        ),
-        {**filter_params, "team_id": team.pk, "offset": offset, "limit": limit},
-        query_type="get_person_ids_by_cohort_id",
-        team_id=team.pk,
-    )
-
-    return [str(row[0]) for row in results]
-
-
-def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int, team: Team):
+def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int, *, team_id: int):
     persons = [
         {
             "id": str(uuid.uuid4()),
             "person_id": str(person_uuid),
             "cohort_id": cohort_id,
-            "team_id": team.pk,
+            "team_id": team_id,
             "_timestamp": datetime.now(),
         }
         for person_uuid in person_uuids
@@ -324,12 +285,12 @@ def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int
     sync_execute(INSERT_PERSON_STATIC_COHORT, persons)
 
 
-def get_static_cohort_size(cohort: Cohort) -> Optional[int]:
+def get_static_cohort_size(*, cohort_id: int, team_id: int) -> Optional[int]:
     count_result = sync_execute(
         GET_STATIC_COHORT_SIZE_SQL,
         {
-            "cohort_id": cohort.pk,
-            "team_id": cohort.team_id,
+            "cohort_id": cohort_id,
+            "team_id": team_id,
         },
     )
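# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# insert_static_cohort() and get_static_cohort_size() now take `team_id` as a
# keyword-only argument (the bare `*` in the signature). That makes call sites
# self-documenting and turns an accidental positional `team` object into an
# immediate TypeError instead of a silently wrong query parameter:

def get_static_cohort_size(*, cohort_id: int, team_id: int) -> int:
    # the real version runs GET_STATIC_COHORT_SIZE_SQL via sync_execute;
    # stubbed here so the sketch is runnable on its own
    return 0


get_static_cohort_size(cohort_id=1, team_id=2)  # OK
try:
    get_static_cohort_size(1, 2)  # type: ignore[misc]
except TypeError as err:
    print(f"rejected positional call: {err}")
# --- end sketch ---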
+ """ + relevant_teams = Team.objects.order_by("id").filter(project_id=cohort.team.project_id) + count_by_team_id: dict[int, int] = {} + for team in relevant_teams: + count_for_team = _recalculate_cohortpeople_for_team( + cohort, pending_version, team, initiating_user_id=initiating_user_id + ) + count_by_team_id[team.id] = count_for_team or 0 + return count_by_team_id[cohort.team_id] + + +def _recalculate_cohortpeople_for_team( + cohort: Cohort, pending_version: int, team: Team, *, initiating_user_id: Optional[int] +) -> Optional[int]: + hogql_context = HogQLContext(within_non_hogql_query=True, team_id=team.id) cohort_query, cohort_params = format_person_query(cohort, 0, hogql_context) - before_count = get_cohort_size(cohort) + before_count = get_cohort_size(cohort, team_id=team.id) if before_count: logger.warn( "Recalculating cohortpeople starting", - team_id=cohort.team_id, + team_id=team.id, cohort_id=cohort.pk, size_before=before_count, ) recalcluate_cohortpeople_sql = RECALCULATE_COHORT_BY_ID.format(cohort_filter=cohort_query) - tag_queries(kind="cohort_calculation", team_id=cohort.team_id, query_type="CohortsQuery") + tag_queries(kind="cohort_calculation", team_id=team.id, query_type="CohortsQuery") if initiating_user_id: tag_queries(user_id=initiating_user_id) @@ -367,7 +345,7 @@ def recalculate_cohortpeople( **cohort_params, **hogql_context.values, "cohort_id": cohort.pk, - "team_id": cohort.team_id, + "team_id": team.id, "new_version": pending_version, }, settings={ @@ -379,12 +357,12 @@ def recalculate_cohortpeople( workload=Workload.OFFLINE, ) - count = get_cohort_size(cohort, override_version=pending_version) + count = get_cohort_size(cohort, override_version=pending_version, team_id=team.id) if count is not None and before_count is not None: logger.warn( "Recalculating cohortpeople done", - team_id=cohort.team_id, + team_id=team.id, cohort_id=cohort.pk, size_before=before_count, size=count, @@ -395,33 +373,40 @@ def recalculate_cohortpeople( def clear_stale_cohortpeople(cohort: Cohort, before_version: int) -> None: if cohort.version and cohort.version > 0: + relevant_team_ids = list(Team.objects.filter(project_id=cohort.team.project_id).values_list("pk", flat=True)) stale_count_result = sync_execute( STALE_COHORTPEOPLE, { "cohort_id": cohort.pk, - "team_id": cohort.team_id, + "team_ids": relevant_team_ids, "version": before_version, }, ) - if stale_count_result and len(stale_count_result) and len(stale_count_result[0]): - stale_count = stale_count_result[0][0] - if stale_count > 0: - # Don't do anything if it already exists - AsyncDeletion.objects.get_or_create( - deletion_type=DeletionType.Cohort_stale, - team_id=cohort.team.pk, - key=f"{cohort.pk}_{before_version}", - ) + team_ids_with_stale_cohortpeople = [row[0] for row in stale_count_result] + if team_ids_with_stale_cohortpeople: + AsyncDeletion.objects.bulk_create( + [ + AsyncDeletion( + deletion_type=DeletionType.Cohort_stale, + team_id=team_id, + # Only appending `team_id` if it's not the same as the cohort's `team_id``, so that + # the migration to environments does not accidentally cause duplicate `AsyncDeletion`s + key=f"{cohort.pk}_{before_version}{('_'+team_id) if team_id != cohort.team_id else ''}", + ) + for team_id in team_ids_with_stale_cohortpeople + ], + ignore_conflicts=True, + ) -def get_cohort_size(cohort: Cohort, override_version: Optional[int] = None) -> Optional[int]: +def get_cohort_size(cohort: Cohort, override_version: Optional[int] = None, *, team_id: int) -> Optional[int]: count_result = 
@@ -545,7 +530,7 @@ def get_dependent_cohorts(
                 continue
             else:
                 current_cohort = Cohort.objects.db_manager(using_database).get(
-                    pk=cohort_id, team_id=cohort.team_id, deleted=False
+                    pk=cohort_id, team__project_id=cohort.team.project_id, deleted=False
                 )
                 seen_cohorts_cache[cohort_id] = current_cohort
                 if current_cohort.id not in seen_cohort_ids:
diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py
index c21af6a397117..beca926b7fbac 100644
--- a/posthog/models/feature_flag/feature_flag.py
+++ b/posthog/models/feature_flag/feature_flag.py
@@ -187,7 +187,7 @@ def transform_cohort_filters_for_easy_evaluation(
                     return self.conditions
                 else:
                     cohort = Cohort.objects.db_manager(using_database).get(
-                        pk=cohort_id, team_id=self.team_id, deleted=False
+                        pk=cohort_id, team__project_id=self.team.project_id, deleted=False
                     )
                     seen_cohorts_cache[cohort_id] = cohort
             except Cohort.DoesNotExist:
@@ -291,7 +291,7 @@ def get_cohort_ids(
                     continue
                 else:
                     cohort = Cohort.objects.db_manager(using_database).get(
-                        pk=cohort_id, team_id=self.team_id, deleted=False
+                        pk=cohort_id, team__project_id=self.team.project_id, deleted=False
                     )
                     seen_cohorts_cache[cohort_id] = cohort
diff --git a/posthog/models/feature_flag/user_blast_radius.py b/posthog/models/feature_flag/user_blast_radius.py
index 712df09ed5002..bf08e8eed950a 100644
--- a/posthog/models/feature_flag/user_blast_radius.py
+++ b/posthog/models/feature_flag/user_blast_radius.py
@@ -77,7 +77,7 @@ def get_user_blast_radius(
 
         if len(cohort_filters) == 1:
             try:
-                target_cohort = Cohort.objects.get(id=cohort_filters[0].value, team=team)
+                target_cohort = Cohort.objects.get(id=cohort_filters[0].value, team__project_id=team.project_id)
             except Cohort.DoesNotExist:
                 pass
             finally:
diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py
index b152e07113f11..01f3d2c4d4745 100644
--- a/posthog/models/filters/mixins/simplify.py
+++ b/posthog/models/filters/mixins/simplify.py
@@ -108,7 +108,7 @@ def _simplify_property(self, team: "Team", property: "Property", **kwargs) -> "P
         from posthog.models.cohort.util import simplified_cohort_filter_properties
 
         try:
-            cohort = Cohort.objects.get(pk=property.value, team_id=team.pk)
+            cohort = Cohort.objects.get(pk=property.value, team__project_id=team.project_id)
         except Cohort.DoesNotExist:
             # :TODO: Handle non-existing resource in-query instead
             return PropertyGroup(type=PropertyOperatorType.AND, values=[property])
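# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The hunks above all make the same change: cohorts are fetched by
# `team__project_id` instead of `team_id`, so a cohort created in one
# environment resolves from any sibling environment of the same project.
# Modeled without the ORM; the record type and registry are hypothetical.

from dataclasses import dataclass


@dataclass
class CohortRow:  # flat stand-in for the Cohort model
    pk: int
    team_id: int
    project_id: int
    deleted: bool = False


ROWS = [CohortRow(pk=9, team_id=1, project_id=100)]


def get_cohort(pk: int, *, project_id: int) -> CohortRow:
    # old behaviour filtered on team_id; new behaviour filters on project_id
    return next(r for r in ROWS if r.pk == pk and r.project_id == project_id and not r.deleted)


assert get_cohort(9, project_id=100).team_id == 1  # found even from a sibling team's request
# --- end sketch ---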
diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py
index 90651b6cd1e5f..ef63b2f69c670 100644
--- a/posthog/models/property/util.py
+++ b/posthog/models/property/util.py
@@ -14,10 +14,7 @@
 from rest_framework import exceptions
 
 from posthog.clickhouse.kafka_engine import trim_quotes_expr
-from posthog.clickhouse.materialized_columns import (
-    TableWithProperties,
-    get_enabled_materialized_columns,
-)
+from posthog.clickhouse.materialized_columns import TableWithProperties, get_materialized_column_for_property
 from posthog.constants import PropertyOperatorType
 from posthog.hogql import ast
 from posthog.hogql.hogql import HogQLContext
@@ -711,17 +708,18 @@ def get_property_string_expr(
     (optional) alias of the table being queried
     :return:
     """
-    materialized_columns = get_enabled_materialized_columns(table) if allow_denormalized_props else {}
-
     table_string = f"{table_alias}." if table_alias is not None and table_alias != "" else ""
 
     if (
         allow_denormalized_props
-        and (property_name, materialised_table_column) in materialized_columns
+        and (
+            materialized_column := get_materialized_column_for_property(table, materialised_table_column, property_name)
+        )
+        and not materialized_column.is_nullable
         and "group" not in materialised_table_column
     ):
         return (
-            f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"',
+            f'{table_string}"{materialized_column.name}"',
             True,
         )
diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py
index 8f4125be67a3c..e5649d6e812d9 100644
--- a/posthog/models/test/test_async_deletion_model.py
+++ b/posthog/models/test/test_async_deletion_model.py
@@ -365,7 +365,7 @@ def test_delete_auxilary_models_via_team(self):
             group_key="org:5",
             properties={},
         )
-        insert_static_cohort([uuid4()], 0, self.teams[0])
+        insert_static_cohort([uuid4()], 0, team_id=self.teams[0].pk)
         self._insert_cohortpeople_row(self.teams[0], uuid4(), 3)
         create_plugin_log_entry(
             team_id=self.teams[0].pk,
@@ -403,7 +403,7 @@ def test_delete_auxilary_models_via_team_unrelated(self):
             group_key="org:5",
             properties={},
         )
-        insert_static_cohort([uuid4()], 0, self.teams[1])
+        insert_static_cohort([uuid4()], 0, team_id=self.teams[1].pk)
         self._insert_cohortpeople_row(self.teams[1], uuid4(), 3)
         create_plugin_log_entry(
             team_id=self.teams[1].pk,
diff --git a/posthog/queries/column_optimizer/foss_column_optimizer.py b/posthog/queries/column_optimizer/foss_column_optimizer.py
index 4fffbd1faa350..c998d92480b5a 100644
--- a/posthog/queries/column_optimizer/foss_column_optimizer.py
+++ b/posthog/queries/column_optimizer/foss_column_optimizer.py
@@ -3,7 +3,7 @@
 from typing import Union, cast
 from collections.abc import Generator
 
-from posthog.clickhouse.materialized_columns import ColumnName, get_enabled_materialized_columns
+from posthog.clickhouse.materialized_columns import ColumnName, get_materialized_column_for_property
 from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType
 from posthog.models.action.util import (
     get_action_tables_and_properties,
@@ -72,12 +72,14 @@ def columns_to_query(
         table_column: str = "properties",
     ) -> set[ColumnName]:
         "Transforms a list of property names to what columns are needed for that query"
-
-        materialized_columns = get_enabled_materialized_columns(table)
-        return {
-            materialized_columns.get((property_name, table_column), table_column)
-            for property_name, _, _ in used_properties
-        }
+        column_names = set()
+        for property_name, _, _ in used_properties:
+            column = get_materialized_column_for_property(table, table_column, property_name)
+            if column is not None and not column.is_nullable:
+                column_names.add(column.name)
+            else:
+                column_names.add(table_column)
+        return column_names
 
     @cached_property
     def is_using_person_properties(self) -> bool:
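# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# Both hunks above swap the dict of enabled materialized columns for a
# per-property lookup, and fall back to the raw `properties` column whenever
# the materialized column is missing or nullable. The selection rule on its
# own, with a hypothetical in-memory column registry:

from dataclasses import dataclass
from typing import Optional


@dataclass
class MaterializedColumn:  # simplified stand-in
    name: str
    is_nullable: bool


COLUMNS = {("events", "properties", "$browser"): MaterializedColumn("mat_browser", False)}


def column_to_query(table: str, table_column: str, property_name: str) -> str:
    column: Optional[MaterializedColumn] = COLUMNS.get((table, table_column, property_name))
    if column is not None and not column.is_nullable:
        return column.name
    return table_column  # fall back to reading the raw JSON column


assert column_to_query("events", "properties", "$browser") == "mat_browser"
assert column_to_query("events", "properties", "$lib") == "properties"
# --- end sketch ---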
diff --git a/posthog/queries/test/__snapshots__/test_trends.ambr b/posthog/queries/test/__snapshots__/test_trends.ambr
index 879da96b30821..01ab1c2e0e23e 100644
--- a/posthog/queries/test/__snapshots__/test_trends.ambr
+++ b/posthog/queries/test/__snapshots__/test_trends.ambr
@@ -22,11 +22,14 @@
 # name: TestTrends.test_action_filtering_with_cohort.2
   '''
   /* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 2
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestTrends.test_action_filtering_with_cohort.3
@@ -110,11 +113,14 @@
 # name: TestTrends.test_action_filtering_with_cohort_poe_v2.2
   '''
   /* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
-  SELECT count()
+  SELECT team_id,
+         count() AS stale_people_count
   FROM cohortpeople
-  WHERE team_id = 99999
+  WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
     AND cohort_id = 99999
     AND version < 2
+  GROUP BY team_id
+  HAVING stale_people_count > 0
   '''
 # ---
 # name: TestTrends.test_action_filtering_with_cohort_poe_v2.3
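# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The snapshots above reflect the new STALE_COHORTPEOPLE query: instead of a
# single count for one team, it returns one row per team that still has stale
# rows (GROUP BY team_id ... HAVING count > 0). The same query shape,
# demonstrated against an in-memory SQLite table rather than ClickHouse:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE cohortpeople (team_id INT, cohort_id INT, version INT)")
conn.executemany(
    "INSERT INTO cohortpeople VALUES (?, ?, ?)",
    [(1, 9, 1), (1, 9, 1), (2, 9, 1), (3, 9, 2)],  # team 3 is already on the new version
)

rows = conn.execute(
    """
    SELECT team_id, count(*) AS stale_people_count FROM cohortpeople
    WHERE team_id IN (1, 2, 3) AND cohort_id = 9 AND version < 2
    GROUP BY team_id
    HAVING stale_people_count > 0
    """
).fetchall()

assert sorted(rows) == [(1, 2), (2, 1)]  # one row per team with stale entries
# --- end sketch ---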
+ """ + from posthog.api.cohort import insert_cohort_actors_into_ch, insert_cohort_people_into_pg cohort = Cohort.objects.get(pk=cohort_id) + if team_id is None: + team_id = cohort.team_id - insert_cohort_actors_into_ch(cohort, filter_data) - insert_cohort_people_into_pg(cohort=cohort) + insert_cohort_actors_into_ch(cohort, filter_data, team_id=team_id) + insert_cohort_people_into_pg(cohort, team_id=team_id) @shared_task(ignore_result=True, max_retries=1) -def insert_cohort_from_query(cohort_id: int) -> None: - from posthog.api.cohort import ( - insert_cohort_people_into_pg, - insert_cohort_query_actors_into_ch, - ) +def insert_cohort_from_query(cohort_id: int, team_id: Optional[int] = None) -> None: + """ + team_id is only optional for backwards compatibility with the old celery task signature. + All new tasks should pass team_id explicitly. + """ + from posthog.api.cohort import insert_cohort_people_into_pg, insert_cohort_query_actors_into_ch cohort = Cohort.objects.get(pk=cohort_id) - insert_cohort_query_actors_into_ch(cohort) - insert_cohort_people_into_pg(cohort=cohort) + if team_id is None: + team_id = cohort.team_id + team = Team.objects.get(pk=team_id) + try: + insert_cohort_query_actors_into_ch(cohort, team=team) + insert_cohort_people_into_pg(cohort, team_id=team_id) + cohort.count = get_static_cohort_size(cohort_id=cohort.id, team_id=cohort.team_id) + cohort.errors_calculating = 0 + cohort.last_calculation = timezone.now() + except: + cohort.errors_calculating = F("errors_calculating") + 1 + cohort.last_error_at = timezone.now() + capture_exception() + if settings.DEBUG: + raise + finally: + cohort.is_calculating = False + cohort.save() @shared_task(ignore_result=True, max_retries=1) diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 751b8f5db70cc..7657db26c203f 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -170,19 +170,21 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]: yield line return elif isinstance(first_result.get("data"), list): + is_comparison = first_result.get("compare_label") + + # take date labels from current results, when comparing against previous + # as previous results will be indexed with offset + date_labels_item = next((x for x in results if x.get("compare_label") == "current"), None) + # TRENDS LIKE for index, item in enumerate(results): label = item.get("label", f"Series #{index + 1}") compare_label = item.get("compare_label", "") series_name = f"{label} - {compare_label}" if compare_label else label - line = {"series": series_name} - # take labels from current results, when comparing against previous - if item.get("compare_label") == "previous": - label_item = results[index - 1] - else: - label_item = item + line = {"series": series_name} + label_item = date_labels_item if is_comparison else item action = item.get("action") if isinstance(action, dict) and action.get("custom_name"): diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py index 29b57da6d7a0b..4d53742ed65bb 100644 --- a/posthog/tasks/exports/test/test_csv_exporter.py +++ b/posthog/tasks/exports/test/test_csv_exporter.py @@ -2,13 +2,13 @@ from typing import Any, Optional from unittest import mock from unittest.mock import MagicMock, Mock, patch, ANY +from dateutil.relativedelta import relativedelta from openpyxl import load_workbook from io import BytesIO import pytest from boto3 import resource from 
diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py
index 29b57da6d7a0b..4d53742ed65bb 100644
--- a/posthog/tasks/exports/test/test_csv_exporter.py
+++ b/posthog/tasks/exports/test/test_csv_exporter.py
@@ -2,13 +2,13 @@
 from typing import Any, Optional
 from unittest import mock
 from unittest.mock import MagicMock, Mock, patch, ANY
+from dateutil.relativedelta import relativedelta
 
 from openpyxl import load_workbook
 from io import BytesIO
 import pytest
 from boto3 import resource
 from botocore.client import Config
-from dateutil.relativedelta import relativedelta
 from django.test import override_settings
 from django.utils.timezone import now
 from requests.exceptions import HTTPError
@@ -703,23 +703,75 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
         self,
     ) -> None:
         _create_person(distinct_ids=[f"user_1"], team=self.team)
-        events_by_person = {
-            "user_1": [
-                {
-                    "event": "$pageview",
-                    "timestamp": datetime(2023, 3, 21, 13, 46),
-                },
-                {
-                    "event": "$pageview",
-                    "timestamp": datetime(2023, 3, 21, 13, 46),
-                },
-                {
-                    "event": "$pageview",
-                    "timestamp": datetime(2023, 3, 22, 13, 47),
-                },
-            ],
-        }
-        journeys_for(events_by_person, self.team)
+
+        date = datetime(2023, 3, 21, 13, 46)
+        date_next_week = date + relativedelta(days=7)
+
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date,
+            properties={"$browser": "Safari"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date,
+            properties={"$browser": "Chrome"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date,
+            properties={"$browser": "Chrome"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date,
+            properties={"$browser": "Firefox"},
+        )
+
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date_next_week,
+            properties={"$browser": "Chrome"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date_next_week,
+            properties={"$browser": "Chrome"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date_next_week,
+            properties={"$browser": "Chrome"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date_next_week,
+            properties={"$browser": "Firefox"},
+        )
+        _create_event(
+            event="$pageview",
+            distinct_id="1",
+            team=self.team,
+            timestamp=date_next_week,
+            properties={"$browser": "Firefox"},
+        )
+        flush_persons_and_events()
 
         exported_asset = ExportedAsset(
@@ -728,7 +780,10 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
             export_context={
                 "source": {
                     "kind": "TrendsQuery",
-                    "dateRange": {"date_to": "2023-03-22", "date_from": "2023-03-22"},
+                    "dateRange": {
+                        "date_from": date.strftime("%Y-%m-%d"),
+                        "date_to": date_next_week.strftime("%Y-%m-%d"),
+                    },
                     "series": [
                         {
                             "kind": "EventsNode",
@@ -738,7 +793,8 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
                         },
                     ],
                     "interval": "day",
-                    "compareFilter": {"compare": True},
+                    "compareFilter": {"compare": True, "compare_to": "-1w"},
+                    "breakdownFilter": {"breakdown": "$browser", "breakdown_type": "event"},
                 }
             },
         )
@@ -747,5 +803,17 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
         with self.settings(OBJECT_STORAGE_ENABLED=True, OBJECT_STORAGE_EXPORTS_FOLDER="Test-Exports"):
             csv_exporter.export_tabular(exported_asset)
             content = object_storage.read(exported_asset.content_location)  # type: ignore
-            lines = (content or "").strip().split("\r\n")
-            self.assertEqual(lines, ["series,22-Mar-2023", "$pageview - current,1", "$pageview - previous,2"])
+
+            lines = (content or "").strip().splitlines()
+
+            expected_lines = [
+                "series,21-Mar-2023,22-Mar-2023,23-Mar-2023,24-Mar-2023,25-Mar-2023,26-Mar-2023,27-Mar-2023,28-Mar-2023",
+                "Chrome - current,2.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0",
+                "Firefox - current,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0",
+                "Safari - current,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0",
+                "Chrome - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0",
+                "Firefox - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0",
+                "Safari - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0",
+            ]
+
+            self.assertEqual(lines, expected_lines)
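# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The exporter fix exercised by this test takes the date labels for *every*
# series from the "current" series, because "previous" series are index-shifted
# once compare_to and breakdowns are combined. The label-selection rule alone:

def date_labels_item(results: list[dict]) -> dict | None:
    # previously the code reached for results[index - 1]; that breaks once
    # series are also split by breakdown, so anchor on the "current" series
    return next((x for x in results if x.get("compare_label") == "current"), None)


results = [
    {"compare_label": "current", "days": ["2023-03-21", "2023-03-22"]},
    {"compare_label": "previous", "days": ["2023-03-14", "2023-03-15"]},
]
item = date_labels_item(results)
assert item is not None and item["days"][0] == "2023-03-21"
# --- end sketch ---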
diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr
index 2230c532da5ca..36733da586c57 100644
--- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr
+++ b/posthog/tasks/test/__snapshots__/test_usage_report.ambr
@@ -3,7 +3,7 @@
   '''
 
   SELECT team_id,
-         multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', JSONExtractString(properties, '$lib') = 'web', 'web_events', JSONExtractString(properties, '$lib') = 'js', 'web_lite_events', JSONExtractString(properties, '$lib') = 'posthog-node', 'node_events', JSONExtractString(properties, '$lib') = 'posthog-android', 'android_events', JSONExtractString(properties, '$lib') = 'posthog-flutter', 'flutter_events', JSONExtractString(properties, '$lib') = 'posthog-ios', 'ios_events', JSONExtractString(properties, '$lib') = 'posthog-go', 'go_events', JSONExtractString(properties, '$lib') = 'posthog-java', 'java_events', JSONExtractString(properties, '$lib') = 'posthog-react-native', 'react_native_events', JSONExtractString(properties, '$lib') = 'posthog-ruby', 'ruby_events', JSONExtractString(properties, '$lib') = 'posthog-python', 'python_events', JSONExtractString(properties, '$lib') = 'posthog-php', 'php_events', 'other') AS metric,
+         multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'web', 'web_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'js', 'web_lite_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-node', 'node_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-android', 'android_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-flutter', 'flutter_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-ios', 'ios_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-go', 'go_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-java', 'java_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-react-native', 'react_native_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-ruby', 'ruby_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-python', 'python_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-php', 'php_events', 'other') AS metric,
          count(1) as count
   FROM events
   WHERE timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59'
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index a2671bd97ebd5..9bd8619317fa6 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -19,7 +19,6 @@
 
 from posthog import version_requirement
 from posthog.clickhouse.client.connection import Workload
-from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns
 from posthog.client import sync_execute
 from posthog.cloud_utils import get_cached_instance_license, is_cloud
 from posthog.constants import FlagRequestType
@@ -29,6 +28,7 @@
 from posthog.models.feature_flag import FeatureFlag
 from posthog.models.organization import Organization
 from posthog.models.plugin import PluginConfig
+from posthog.models.property.util import get_property_string_expr
 from posthog.models.team.team import Team
 from posthog.models.utils import namedtuplefetchall
 from posthog.settings import CLICKHOUSE_CLUSTER, INSTANCE_TAG
@@ -460,10 +460,8 @@ def get_teams_with_event_count_with_groups_in_period(begin: datetime, end: datet
 
 @timed_log()
 @retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF)
 def get_all_event_metrics_in_period(begin: datetime, end: datetime) -> dict[str, list[tuple[int, int]]]:
-    materialized_columns = get_enabled_materialized_columns("events")
-    # Check if $lib is materialized
-    lib_expression = materialized_columns.get(("$lib", "properties"), "JSONExtractString(properties, '$lib')")
+    lib_expression, _ = get_property_string_expr("events", "$lib", "'$lib'", "properties")
 
     results = sync_execute(
         f"""
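# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The snapshot now extracts $lib via replaceRegexpAll(JSONExtractRaw(...), '^"|"$', '')
# because get_property_string_expr() builds the expression that way. A Python
# analogue of that quote-trimming step, suggesting (for plain string values)
# it matches what JSONExtractString would return:

import json
import re


def trim_quotes(raw: str) -> str:
    return re.sub(r'^"|"$', "", raw)


properties = json.dumps({"$lib": "posthog-python"})
raw = json.dumps(json.loads(properties)["$lib"])  # like JSONExtractRaw: keeps the quotes
assert trim_quotes(raw) == "posthog-python"       # like JSONExtractString
# --- end sketch ---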
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 43dcc0e130964..53f4932f2898f 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -30,7 +30,6 @@
 from posthog import rate_limit, redis
 from posthog.clickhouse.client import sync_execute
 from posthog.clickhouse.client.connection import ch_pool
-from posthog.clickhouse.materialized_columns import get_materialized_columns
 from posthog.clickhouse.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL
 from posthog.cloud_utils import TEST_clear_instance_license_cache
 from posthog.models import Dashboard, DashboardTile, Insight, Organization, Team, User
@@ -121,6 +120,8 @@ def clean_varying_query_parts(query, replace_all_numbers):
     else:
         query = re.sub(r"(team|cohort)_id(\"?) = \d+", r"\1_id\2 = 99999", query)
 
+        query = re.sub(r"(team|cohort)_id(\"?) IN \(\d+(, ?\d+)*\)", r"\1_id\2 IN (1, 2, 3, 4, 5 /* ... */)", query)
+        query = re.sub(r"(team|cohort)_id(\"?) IN \[\d+(, ?\d+)*\]", r"\1_id\2 IN [1, 2, 3, 4, 5 /* ... */]", query)
     query = re.sub(r"\d+ as (team|cohort)_id(\"?)", r"99999 as \1_id\2", query)
     # feature flag conditions use primary keys as columns in queries, so replace those always
     query = re.sub(r"flag_\d+_condition", r"flag_X_condition", query)
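# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# The two re.sub() lines added above normalize team/cohort id lists in query
# snapshots, covering both SQL tuples `IN (...)` and ClickHouse arrays
# `IN [...]`. Using the exact patterns from the diff on a sample query:

import re


def normalize(query: str) -> str:
    query = re.sub(r"(team|cohort)_id(\"?) IN \(\d+(, ?\d+)*\)", r"\1_id\2 IN (1, 2, 3, 4, 5 /* ... */)", query)
    query = re.sub(r"(team|cohort)_id(\"?) IN \[\d+(, ?\d+)*\]", r"\1_id\2 IN [1, 2, 3, 4, 5 /* ... */]", query)
    return query


assert normalize("WHERE team_id IN [17, 18, 19]") == "WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]"
assert normalize("WHERE cohort_id IN (7, 8)") == "WHERE cohort_id IN (1, 2, 3, 4, 5 /* ... */)"
# --- end sketch ---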
@@ -575,35 +576,31 @@ def stripResponse(response, remove=("action", "label", "persons_urls", "filter")
     return response
 
 
-def default_materialised_columns():
+def cleanup_materialized_columns():
     try:
+        from ee.clickhouse.materialized_columns.columns import get_materialized_columns
         from ee.clickhouse.materialized_columns.test.test_columns import EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS
     except:
         # EE not available? Skip
-        return []
-
-    default_columns = []
-    for prop in EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS:
-        column_name = get_materialized_columns("events")[(prop, "properties")]
-        default_columns.append(column_name)
-
-    return default_columns
-
+        return
 
-def cleanup_materialized_columns():
     def optionally_drop(table, filter=None):
         drops = ",".join(
             [
-                f"DROP COLUMN {column_name}"
-                for column_name in get_materialized_columns(table).values()
-                if filter is None or filter(column_name)
+                f"DROP COLUMN {column.name}"
+                for column in get_materialized_columns(table).values()
+                if filter is None or filter(column.name)
             ]
        )
        if drops:
            sync_execute(f"ALTER TABLE {table} {drops} SETTINGS mutations_sync = 2")

-    default_columns = default_materialised_columns()
-    optionally_drop("events", lambda name: name not in default_columns)
+    default_column_names = {
+        get_materialized_columns("events")[(prop, "properties")].name
+        for prop in EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS
+    }
+
+    optionally_drop("events", lambda name: name not in default_column_names)
     optionally_drop("person")
     optionally_drop("groups")
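# --- Illustrative sketch (editorial addition, not part of the applied diff) ---
# cleanup_materialized_columns() now receives MaterializedColumn objects rather
# than bare names, so the DROP statement is assembled from `column.name`. The
# statement-building step in isolation; table and column names here are
# hypothetical:

from dataclasses import dataclass


@dataclass
class MaterializedColumn:
    name: str
    is_nullable: bool = False


def build_drop_statement(table: str, columns: list[MaterializedColumn], keep: set[str]) -> str | None:
    drops = ",".join(f"DROP COLUMN {c.name}" for c in columns if c.name not in keep)
    if not drops:
        return None  # nothing to drop; the real helper simply skips sync_execute
    return f"ALTER TABLE {table} {drops} SETTINGS mutations_sync = 2"


stmt = build_drop_statement(
    "events",
    [MaterializedColumn("mat_browser"), MaterializedColumn("mat_default")],
    keep={"mat_default"},
)
assert stmt == "ALTER TABLE events DROP COLUMN mat_browser SETTINGS mutations_sync = 2"
# --- end sketch ---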