+
Watch recordings of how users interact with your web app to see what can be improved.{' '}
([
}),
reducers(({ props }) => ({
- selectedLevel: [
+ selectedLevelRaw: [
(props.settingLevelId ?? 'project') as SettingLevelId,
{
selectLevel: (_, { level }) => level,
selectSection: (_, { level }) => level,
},
],
- selectedSectionId: [
+ selectedSectionIdRaw: [
(props.sectionId ?? null) as SettingSectionId | null,
{
selectLevel: () => null,
@@ -94,6 +94,40 @@ export const settingsLogic = kea([
return sections
},
],
+ selectedLevel: [
+ (s) => [s.selectedLevelRaw, s.selectedSectionIdRaw, s.featureFlags],
+ (selectedLevelRaw, selectedSectionIdRaw, featureFlags): SettingLevelId => {
+ // As of mid-September 2024, `details` and `danger-zone` are the only sections present
+ // at both the Environment and Project levels. We redirect the other sections based on the feature flag.
+ if (
+ !selectedSectionIdRaw ||
+ (!selectedSectionIdRaw.endsWith('-details') && !selectedSectionIdRaw.endsWith('-danger-zone'))
+ ) {
+ if (featureFlags[FEATURE_FLAGS.ENVIRONMENTS]) {
+ return selectedLevelRaw === 'project' ? 'environment' : selectedLevelRaw
+ }
+ return selectedLevelRaw === 'environment' ? 'project' : selectedLevelRaw
+ }
+ return selectedLevelRaw
+ },
+ ],
+ selectedSectionId: [
+ (s) => [s.selectedSectionIdRaw, s.featureFlags],
+ (selectedSectionIdRaw, featureFlags): SettingSectionId | null => {
+ if (!selectedSectionIdRaw) {
+ return null
+ }
+ // As of mid-September 2024, `details` and `danger-zone` are the only sections present
+ // at both the Environment and Project levels. We redirect the other sections based on the feature flag.
+ if (!selectedSectionIdRaw.endsWith('-details') && !selectedSectionIdRaw.endsWith('-danger-zone')) {
+ if (featureFlags[FEATURE_FLAGS.ENVIRONMENTS]) {
+ return selectedSectionIdRaw.replace(/^project/, 'environment') as SettingSectionId
+ }
+ return selectedSectionIdRaw.replace(/^environment/, 'project') as SettingSectionId
+ }
+ return selectedSectionIdRaw
+ },
+ ],
selectedSection: [
(s) => [s.sections, s.selectedSectionId],
(sections, selectedSectionId): SettingSection | null => {
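For reference, here is a standalone sketch of the redirect rule that the new `selectedLevel` / `selectedSectionId` selectors encode, pulled out of kea for readability. The `SettingLevelId` union, `SettingSectionId` alias, and `environmentsFlagEnabled` parameter are simplified stand-ins for the real types and `featureFlags[FEATURE_FLAGS.ENVIRONMENTS]`, so treat this as an illustration of the logic rather than the actual implementation:

```ts
// Simplified stand-ins for the real setting types (illustrative only).
type SettingLevelId = 'user' | 'environment' | 'project' | 'organization'
type SettingSectionId = string

// `-details` and `-danger-zone` sections exist at both levels, so they are never remapped.
function isSharedSection(sectionId: SettingSectionId | null): boolean {
    return !!sectionId && (sectionId.endsWith('-details') || sectionId.endsWith('-danger-zone'))
}

function resolveLevel(
    rawLevel: SettingLevelId,
    rawSectionId: SettingSectionId | null,
    environmentsFlagEnabled: boolean
): SettingLevelId {
    if (isSharedSection(rawSectionId)) {
        return rawLevel
    }
    // With the flag on, project-level settings render as environment-level ones; with it off, the reverse.
    if (environmentsFlagEnabled) {
        return rawLevel === 'project' ? 'environment' : rawLevel
    }
    return rawLevel === 'environment' ? 'project' : rawLevel
}

function resolveSectionId(
    rawSectionId: SettingSectionId | null,
    environmentsFlagEnabled: boolean
): SettingSectionId | null {
    if (!rawSectionId || isSharedSection(rawSectionId)) {
        return rawSectionId
    }
    // Swap the level prefix of the section ID in the same direction as resolveLevel.
    return environmentsFlagEnabled
        ? rawSectionId.replace(/^project/, 'environment')
        : rawSectionId.replace(/^environment/, 'project')
}
```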
diff --git a/frontend/src/scenes/settings/settingsSceneLogic.ts b/frontend/src/scenes/settings/settingsSceneLogic.ts
index ac429fd3f3f9b..0813661d4c988 100644
--- a/frontend/src/scenes/settings/settingsSceneLogic.ts
+++ b/frontend/src/scenes/settings/settingsSceneLogic.ts
@@ -51,6 +51,7 @@ export const settingsSceneLogic = kea([
// As of middle of September 2024, `details` and `danger-zone` are the only sections present
// at both Environment and Project levels. Others we want to redirect based on the feature flag.
+ // This is just for URLs, since analogous logic for _rendering_ settings is already in settingsLogic.
if (!section.endsWith('-details') && !section.endsWith('-danger-zone')) {
if (values.featureFlags[FEATURE_FLAGS.ENVIRONMENTS]) {
section = section.replace(/^project/, 'environment')
@@ -73,16 +74,20 @@ export const settingsSceneLogic = kea([
})),
actionToUrl(({ values }) => ({
+ // Replacing the history entry instead of pushing a new one, so that the environments<>project redirect doesn't affect history
selectLevel({ level }) {
- return [urls.settings(level), router.values.searchParams, router.values.hashParams]
+ return [urls.settings(level), router.values.searchParams, router.values.hashParams, { replace: true }]
},
selectSection({ section }) {
- return [urls.settings(section), router.values.searchParams, router.values.hashParams]
+ return [urls.settings(section), router.values.searchParams, router.values.hashParams, { replace: true }]
},
selectSetting({ setting }) {
- const url = urls.settings(values.selectedSectionId ?? values.selectedLevel, setting)
-
- return [url]
+ return [
+ urls.settings(values.selectedSectionId ?? values.selectedLevel, setting),
+ undefined,
+ undefined,
+ { replace: true },
+ ]
},
})),
])
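For context, a minimal kea-router sketch of the four-element tuple now returned from `actionToUrl`: the fourth element is an options object, and `{ replace: true }` swaps the current history entry instead of pushing a new one. The logic name, action, and hard-coded path below are hypothetical; the real code builds URLs with `urls.settings(...)`:

```ts
import { actions, kea, path } from 'kea'
import { actionToUrl, router } from 'kea-router'

// Hypothetical minimal logic showing the actionToUrl return shape used above.
const exampleSettingsUrlLogic = kea([
    path(['scenes', 'settings', 'exampleSettingsUrlLogic']),
    actions({
        selectLevel: (level: string) => ({ level }),
    }),
    actionToUrl(() => ({
        selectLevel: ({ level }: { level: string }) => [
            `/settings/${level}`, // stand-in for urls.settings(level)
            router.values.searchParams,
            router.values.hashParams,
            // Router options: replace the current history entry instead of pushing,
            // so the project<>environment redirect doesn't pollute Back-button history.
            { replace: true },
        ],
    })),
])
```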
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index 8b815fbdb5ec5..78020a95ec1b4 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -1,67 +1,4 @@
posthog/warehouse/models/ssh_tunnel.py:0: error: Incompatible types in assignment (expression has type "NoEncryption", variable has type "BestAvailableEncryption") [assignment]
-posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable]
-posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap]
-posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
-posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
-posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index]
-posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
-posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment]
-posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/utils.py:0: error: No overload variant of "asdict" matches argument type "type[DataclassInstance]" [call-overload]
posthog/utils.py:0: note: Possible overload variants:
posthog/utils.py:0: note: def asdict(obj: DataclassInstance) -> dict[str, Any]
@@ -322,7 +259,7 @@ ee/billing/billing_manager.py:0: error: Incompatible types in assignment (expres
posthog/models/property/util.py:0: error: Incompatible type for lookup 'pk': (got "str | int | list[str]", expected "str | int") [misc]
posthog/models/property/util.py:0: error: Argument 3 to "format_filter_query" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type]
posthog/models/property/util.py:0: error: Argument 3 to "format_cohort_subquery" has incompatible type "HogQLContext | None"; expected "HogQLContext" [arg-type]
-posthog/models/property/util.py:0: error: Invalid index type "tuple[str, str]" for "dict[tuple[str, Literal['properties', 'group_properties', 'person_properties']], str]"; expected type "tuple[str, Literal['properties', 'group_properties', 'person_properties']]" [index]
+posthog/models/property/util.py:0: error: Argument 2 to "get_materialized_column_for_property" has incompatible type "str"; expected "Literal['properties', 'group_properties', 'person_properties']" [arg-type]
posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type]
posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type]
posthog/models/property/util.py:0: error: Argument 1 to "append" of "list" has incompatible type "str | int"; expected "str" [arg-type]
@@ -332,7 +269,7 @@ posthog/api/documentation.py:0: note: def run_validation(self, data: Any = ...)
posthog/api/documentation.py:0: note: Subclass:
posthog/api/documentation.py:0: note: def run_validation(self, data: Any) -> Any
posthog/queries/trends/util.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | None"; expected "str" [arg-type]
-posthog/queries/column_optimizer/foss_column_optimizer.py:0: error: Argument 1 to "get" of "dict" has incompatible type "tuple[str, str]"; expected "tuple[str, Literal['properties', 'group_properties', 'person_properties']]" [arg-type]
+posthog/queries/column_optimizer/foss_column_optimizer.py:0: error: Argument 2 to "get_materialized_column_for_property" has incompatible type "str"; expected "Literal['properties', 'group_properties', 'person_properties']" [arg-type]
posthog/hogql/property.py:0: error: Incompatible type for lookup 'id': (got "str | int | list[str]", expected "str | int") [misc]
posthog/hogql/property.py:0: error: Incompatible type for lookup 'pk': (got "str | float", expected "str | int") [misc]
posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment]
@@ -470,6 +407,9 @@ posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def
posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl]
posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]]
posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl
+posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Statement is unreachable [unreachable]
+posthog/temporal/data_imports/pipelines/sql_database_v2/schema_types.py:0: error: Non-overlapping equality check (left operand type: "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'json', 'decimal', 'wei', 'date', 'time'] | None", right operand type: "Literal['interval']") [comparison-overlap]
+posthog/temporal/data_imports/pipelines/sql_database/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/tasks/test/test_update_survey_iteration.py:0: error: Item "None" of "FeatureFlag | None" has no attribute "filters" [union-attr]
posthog/tasks/test/test_stop_surveys_reached_target.py:0: error: No overload variant of "__sub__" of "datetime" matches argument type "None" [operator]
posthog/tasks/test/test_stop_surveys_reached_target.py:0: note: Possible overload variants:
@@ -603,22 +543,10 @@ posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in a
posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "str | int | Combinable") [assignment]
posthog/warehouse/data_load/validate_schema.py:0: error: Incompatible types in assignment (expression has type "dict[str, dict[str, str | bool]] | dict[str, str]", variable has type "dict[str, dict[str, str]]") [assignment]
posthog/warehouse/data_load/source_templates.py:0: error: Incompatible types in assignment (expression has type "str", variable has type "Type") [assignment]
-posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value]
-posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type]
-posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload]
-posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants:
-posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None
-posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]]
-posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T
-posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload]
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants:
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str]
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T
-posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, ndarray[Any, dtype[Any]]]"; expected type "str" [index]
+posthog/temporal/data_imports/pipelines/sql_database_v2/arrow_helpers.py:0: error: Invalid index type "str | None" for "dict[str, TColumnSchema]"; expected type "str" [index]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a return type annotation [no-untyped-def]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation [no-untyped-def]
posthog/tasks/exports/test/test_csv_exporter.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
@@ -664,7 +592,6 @@ posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" fo
posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined]
posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "_MonkeyPatchedResponse"; expected type "str" [index]
posthog/models/test/test_organization_model.py:0: error: Module "django.utils.timezone" does not explicitly export attribute "timedelta" [attr-defined]
-posthog/hogql_queries/test/test_actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | None" has no attribute "next_join" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "constraint" [union-attr]
posthog/hogql/test/test_resolver.py:0: error: Item "None" of "JoinConstraint | Any | None" has no attribute "constraint_type" [union-attr]
@@ -792,6 +719,22 @@ posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict k
posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 20 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item]
posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 21 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item]
posthog/temporal/data_modeling/run_workflow.py:0: error: Dict entry 22 has incompatible type "str": "Literal['complex']"; expected "str": "Literal['text', 'double', 'bool', 'timestamp', 'bigint', 'binary', 'json', 'decimal', 'wei', 'date', 'time']" [dict-item]
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: No overload variant of "get" of "dict" matches argument types "str", "tuple[()]" [call-overload]
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: Possible overload variants:
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, /) -> Sequence[str] | None
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def get(self, Type, Sequence[str], /) -> Sequence[str]
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: note: def [_T] get(self, Type, _T, /) -> Sequence[str] | _T
+posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py:0: error: Argument "source_id" to "sync_old_schemas_with_new_schemas" has incompatible type "str"; expected "UUID" [arg-type]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "Incremental[Any] | None" has no attribute "row_order" [union-attr]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "Literal['asc', 'desc'] | Any | None", variable has type "Literal['asc', 'desc']") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Column[Any]") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "Literal['asc', 'desc']") [assignment]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Item "None" of "dict[str, Any] | None" has no attribute "get" [union-attr]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Argument "primary_key" to "make_hints" has incompatible type "list[str] | None"; expected "str | Sequence[str] | Callable[[Any], str | Sequence[str]]" [arg-type]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/sql_database_v2/helpers.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "FilesystemDestinationClientConfiguration" has no attribute "delta_jobs_per_write" [attr-defined]
posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: "type[FilesystemDestinationClientConfiguration]" has no attribute "delta_jobs_per_write" [attr-defined]
posthog/temporal/data_imports/pipelines/pipeline_sync.py:0: error: Incompatible types in assignment (expression has type "object", variable has type "DataWarehouseCredential | Combinable | None") [assignment]
@@ -826,6 +769,23 @@ posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0:
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: list[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_cursors" (hint: "_cursors: list[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: List item 0 has incompatible type "tuple[str, str, int, int, int, int, str, int]"; expected "tuple[str, str, int, int, str, str, str, str]" [list-item]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "with_only_columns" of "Select" matches argument type "ReadOnlyColumnCollection[str, Column[Any]]" [call-overload]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants:
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], /) -> Select[tuple[_T0]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], /) -> Select[tuple[_T0, _T1]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], /) -> Select[tuple[_T0, _T1, _T2]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], /) -> Select[tuple[_T0, _T1, _T2, _T3]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7] with_only_columns(self, TypedColumnsClauseRole[_T0] | SQLCoreOperations[_T0] | type[_T0], TypedColumnsClauseRole[_T1] | SQLCoreOperations[_T1] | type[_T1], TypedColumnsClauseRole[_T2] | SQLCoreOperations[_T2] | type[_T2], TypedColumnsClauseRole[_T3] | SQLCoreOperations[_T3] | type[_T3], TypedColumnsClauseRole[_T4] | SQLCoreOperations[_T4] | type[_T4], TypedColumnsClauseRole[_T5] | SQLCoreOperations[_T5] | type[_T5], TypedColumnsClauseRole[_T6] | SQLCoreOperations[_T6] | type[_T6], TypedColumnsClauseRole[_T7] | SQLCoreOperations[_T7] | type[_T7], /) -> Select[tuple[_T0, _T1, _T2, _T3, _T4, _T5, _T6, _T7]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def with_only_columns(self, *entities: TypedColumnsClauseRole[Any] | ColumnsClauseRole | SQLCoreOperations[Any] | Literal['*', 1] | type[Any] | Inspectable[_HasClauseElement[Any]] | _HasClauseElement[Any], maintain_column_froms: bool = ..., **Any) -> Select[Any]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: error: No overload variant of "resource" matches argument types "Callable[[Engine, Table, int, Literal['sqlalchemy', 'pyarrow', 'pandas', 'connectorx'], Incremental[Any] | None, bool, Callable[[Table], None] | None, Literal['minimal', 'full', 'full_with_precision'], dict[str, Any] | None, Callable[[TypeEngine[Any]], TypeEngine[Any] | type[TypeEngine[Any]] | None] | None, list[str] | None, Callable[[Select[Any], Table], Select[Any]] | None, list[str] | None], Iterator[Any]]", "str", "list[str] | None", "list[str] | None", "dict[str, TColumnSchema]", "Collection[str]", "str" [call-overload]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: Possible overload variants:
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TResourceFunParams`-1, TDltResourceImpl: DltResource] resource(Callable[TResourceFunParams, Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> Callable[[Callable[TResourceFunParams, Any]], TDltResourceImpl]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(None = ..., /, name: str | Callable[[Any], str] = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ..., standalone: Literal[True] = ...) -> Callable[[Callable[TResourceFunParams, Any]], Callable[TResourceFunParams, TDltResourceImpl]]
+posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py:0: note: def [TDltResourceImpl: DltResource] resource(list[Any] | tuple[Any] | Iterator[Any], /, name: str = ..., table_name: str | Callable[[Any], str] = ..., max_table_nesting: int = ..., write_disposition: Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict | Callable[[Any], Literal['skip', 'append', 'replace', 'merge'] | TWriteDispositionDict | TMergeDispositionDict | TScd2StrategyDict] = ..., columns: dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel] | Callable[[Any], dict[str, TColumnSchema] | Sequence[TColumnSchema] | BaseModel | type[BaseModel]] = ..., primary_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., merge_key: str | Sequence[str] | Callable[[Any], str | Sequence[str]] = ..., schema_contract: Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | Callable[[Any], Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict] = ..., table_format: Literal['iceberg', 'delta', 'hive'] | Callable[[Any], Literal['iceberg', 'delta', 'hive']] = ..., file_format: Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference'] | Callable[[Any], Literal['preferred', 'jsonl', 'typed-jsonl', 'insert_values', 'parquet', 'csv', 'reference']] = ..., references: Sequence[TTableReference] | Callable[[Any], Sequence[TTableReference]] = ..., selected: bool = ..., spec: type[BaseConfiguration] = ..., parallelized: bool = ..., _impl_cls: type[TDltResourceImpl] = ...) -> TDltResourceImpl
posthog/migrations/0237_remove_timezone_from_teams.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/migrations/0228_fix_tile_layouts.py:0: error: Argument 2 to "RunPython" has incompatible type "Callable[[Migration, Any], None]"; expected "_CodeCallable | None" [arg-type]
posthog/api/plugin_log_entry.py:0: error: Name "timezone.datetime" is not defined [name-defined]
@@ -835,6 +795,29 @@ posthog/api/plugin_log_entry.py:0: error: Module "django.utils.timezone" does no
posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment]
posthog/api/sharing.py:0: error: Item "None" of "list[Any] | None" has no attribute "__iter__" (not iterable) [union-attr]
posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Dict entry 2 has incompatible type "Literal['auto']": "None"; expected "Literal['json_response', 'header_link', 'auto', 'single_page', 'cursor', 'offset', 'page_number']": "type[BasePaginator]" [dict-item]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "AuthConfigBase") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "get_auth_class" has incompatible type "Literal['bearer', 'api_key', 'http_basic'] | None"; expected "Literal['bearer', 'api_key', 'http_basic']" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Need type annotation for "dependency_graph" [var-annotated]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "None", target has type "ResolvedParam") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible return value type (got "tuple[TopologicalSorter[Any], dict[str, EndpointResource], dict[str, ResolvedParam]]", expected "tuple[Any, dict[str, EndpointResource], dict[str, ResolvedParam | None]]") [return-value]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("str | Endpoint | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type variable "StrOrLiteralStr" of "parse" of "Formatter" cannot be "str | None" [type-var]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unsupported right operand type for in ("dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None") [operator]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "pop" [union-attr]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Value of type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" is not indexable [index]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "str | None" has no attribute "format" [union-attr]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Argument 1 to "single_entity_path" has incompatible type "str | None"; expected "str" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Item "None" of "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None" has no attribute "items" [union-attr]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Incompatible types in assignment (expression has type "str | None", variable has type "str") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Statement is unreachable [unreachable]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, Any]" [dict-item]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 0 has incompatible type "dict[str, Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
+posthog/temporal/data_imports/pipelines/rest_source/config_setup.py:0: error: Unpacked dict entry 1 has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "SupportsKeysAndGetItem[str, ResolveParamConfig | IncrementalParamConfig | Any]" [dict-item]
posthog/api/test/batch_exports/conftest.py:0: error: Signature of "run" incompatible with supertype "Worker" [override]
posthog/api/test/batch_exports/conftest.py:0: note: Superclass:
posthog/api/test/batch_exports/conftest.py:0: note: def run(self) -> Coroutine[Any, Any, None]
@@ -849,6 +832,49 @@ posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Not all union combinations were tried because there are too many unions [misc]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 2 to "source" has incompatible type "str | None"; expected "str" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 3 to "source" has incompatible type "str | None"; expected "str" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 4 to "source" has incompatible type "int | None"; expected "int" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 6 to "source" has incompatible type "Schema | None"; expected "Schema" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 7 to "source" has incompatible type "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict | None"; expected "Literal['evolve', 'discard_value', 'freeze', 'discard_row'] | TSchemaContractDict" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 8 to "source" has incompatible type "type[BaseConfiguration] | None"; expected "type[BaseConfiguration]" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "build_resource_dependency_graph" has incompatible type "EndpointResourceBase | None"; expected "EndpointResourceBase" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible types in assignment (expression has type "list[str] | None", variable has type "list[str]") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "setup_incremental_object" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "dict[str, Any]" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument "base_url" to "RESTClient" has incompatible type "str | None"; expected "str" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Argument 1 to "exclude_keys" has incompatible type "dict[str, ResolveParamConfig | IncrementalParamConfig | Any] | None"; expected "Mapping[str, Any]" [arg-type]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Incompatible default for argument "resolved_param" (default has type "ResolvedParam | None", argument has type "ResolvedParam") [assignment]
+posthog/temporal/data_imports/pipelines/rest_source/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
+posthog/api/test/test_team.py:0: error: "HttpResponse" has no attribute "json" [attr-defined]
+posthog/test/test_middleware.py:0: error: Incompatible types in assignment (expression has type "_MonkeyPatchedWSGIResponse", variable has type "_MonkeyPatchedResponse") [assignment]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/zendesk/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/vitally/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/pipelines/stripe/__init__.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Unpacked dict entry 1 has incompatible type "str"; expected "SupportsKeysAndGetItem[str, str]" [dict-item]
@@ -890,6 +916,22 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo
posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index]
posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExport" is not indexable [index]
posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value]
+posthog/warehouse/api/external_data_schema.py:0: error: Incompatible return value type (got "str | None", expected "SyncType | None") [return-value]
+posthog/warehouse/api/external_data_schema.py:0: error: Argument 1 to "get_sql_schemas_for_source_type" has incompatible type "str"; expected "Type" [arg-type]
+posthog/warehouse/api/external_data_schema.py:0: error: No overload variant of "get" of "dict" matches argument type "str" [call-overload]
+posthog/warehouse/api/external_data_schema.py:0: note: Possible overload variants:
+posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, /) -> dict[str, list[IncrementalField]] | None
+posthog/warehouse/api/external_data_schema.py:0: note: def get(self, Type, dict[str, list[IncrementalField]], /) -> dict[str, list[IncrementalField]]
+posthog/warehouse/api/external_data_schema.py:0: note: def [_T] get(self, Type, _T, /) -> dict[str, list[IncrementalField]] | _T
+posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/warehouse/api/table.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/temporal/data_imports/external_data_job.py:0: error: Argument "status" to "update_external_job_status" has incompatible type "str"; expected "Status" [arg-type]
+posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
+posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
+posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
+posthog/temporal/tests/external_data/test_external_data_job.py:0: error: Invalid index type "str" for "dict[Type, Sequence[str]]"; expected type "Type" [index]
+posthog/temporal/tests/data_imports/test_end_to_end.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/api/query.py:0: error: Statement is unreachable [unreachable]
posthog/api/test/test_capture.py:0: error: Statement is unreachable [unreachable]
posthog/api/test/test_capture.py:0: error: Incompatible return value type (got "_MonkeyPatchedWSGIResponse", expected "HttpResponse") [return-value]
diff --git a/package.json b/package.json
index ca06eb6507b3d..4da2f9aff7c1a 100644
--- a/package.json
+++ b/package.json
@@ -161,7 +161,7 @@
"pmtiles": "^2.11.0",
"postcss": "^8.4.31",
"postcss-preset-env": "^9.3.0",
- "posthog-js": "1.194.3",
+ "posthog-js": "1.194.5",
"posthog-js-lite": "3.0.0",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 30cdc4508d7f0..3097e83e0a10a 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -305,8 +305,8 @@ dependencies:
specifier: ^9.3.0
version: 9.3.0(postcss@8.4.31)
posthog-js:
- specifier: 1.194.3
- version: 1.194.3
+ specifier: 1.194.5
+ version: 1.194.5
posthog-js-lite:
specifier: 3.0.0
version: 3.0.0
@@ -17829,8 +17829,8 @@ packages:
resolution: {integrity: sha512-dyajjnfzZD1tht4N7p7iwf7nBnR1MjVaVu+MKr+7gBgA39bn28wizCIJZztZPtHy4PY0YwtSGgwfBCuG/hnHgA==}
dev: false
- /posthog-js@1.194.3:
- resolution: {integrity: sha512-/YFpBMqZzRpywa07QeoaIojdrUDijFajT4gZBSCFUBuZA5BN5xr5S1spsvtpT7E4RjkQSVgRvUngI4W19csgQw==}
+ /posthog-js@1.194.5:
+ resolution: {integrity: sha512-bYa20TkwzkDsr2y3iCiJNto/bthkYkmHZopIOXzFEw7KeB581Y1WueaOry5MFHEwnpZuomqEmcMQGBAoWvv8VA==}
dependencies:
core-js: 3.39.0
fflate: 0.4.8
diff --git a/posthog/api/cohort.py b/posthog/api/cohort.py
index 2d5d557f52b0b..762e9a5b4a894 100644
--- a/posthog/api/cohort.py
+++ b/posthog/api/cohort.py
@@ -14,6 +14,7 @@
)
from posthog.models.person.person import PersonDistinctId
from posthog.models.property.property import Property, PropertyGroup
+from posthog.models.team.team import Team
from posthog.queries.base import property_group_to_Q
from posthog.metrics import LABEL_TEAM_ID
from posthog.renderers import SafeJSONRenderer
@@ -22,8 +23,6 @@
from django.conf import settings
from django.db.models import QuerySet, Prefetch, prefetch_related_objects, OuterRef, Subquery
-from django.db.models.expressions import F
-from django.utils import timezone
from rest_framework import serializers, viewsets, request, status
from posthog.api.utils import action
from rest_framework.exceptions import ValidationError
@@ -52,7 +51,7 @@
from posthog.hogql.constants import CSV_EXPORT_LIMIT
from posthog.event_usage import report_user_action
from posthog.hogql.context import HogQLContext
-from posthog.models import Cohort, FeatureFlag, User, Person
+from posthog.models import Cohort, FeatureFlag, Person
from posthog.models.async_deletion import AsyncDeletion, DeletionType
from posthog.models.cohort.util import get_dependent_cohorts, print_cohort_hogql_query
from posthog.models.cohort import CohortOrEmpty
@@ -139,14 +138,14 @@ def _handle_static(self, cohort: Cohort, context: dict, validated_data: dict) ->
elif context.get("from_feature_flag_key"):
insert_cohort_from_feature_flag.delay(cohort.pk, context["from_feature_flag_key"], self.context["team_id"])
elif validated_data.get("query"):
- insert_cohort_from_query.delay(cohort.pk)
+ insert_cohort_from_query.delay(cohort.pk, self.context["team_id"])
else:
filter_data = request.GET.dict()
existing_cohort_id = context.get("from_cohort_id")
if existing_cohort_id:
filter_data = {**filter_data, "from_cohort_id": existing_cohort_id}
if filter_data:
- insert_cohort_from_insight_filter.delay(cohort.pk, filter_data)
+ insert_cohort_from_insight_filter.delay(cohort.pk, filter_data, self.context["team_id"])
def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort:
request = self.context["request"]
@@ -173,7 +172,7 @@ def _calculate_static_by_csv(self, file, cohort: Cohort) -> None:
decoded_file = file.read().decode("utf-8").splitlines()
reader = csv.reader(decoded_file)
distinct_ids_and_emails = [row[0] for row in reader if len(row) > 0 and row]
- calculate_cohort_from_list.delay(cohort.pk, distinct_ids_and_emails)
+ calculate_cohort_from_list.delay(cohort.pk, distinct_ids_and_emails, team_id=self.context["team_id"])
def validate_query(self, query: Optional[dict]) -> Optional[dict]:
if not query:
@@ -195,7 +194,7 @@ def validate_filters(self, request_filters: dict):
instance = cast(Cohort, self.instance)
cohort_id = instance.pk
flags: QuerySet[FeatureFlag] = FeatureFlag.objects.filter(
- team_id=self.context["team_id"], active=True, deleted=False
+ team__project_id=self.context["project_id"], active=True, deleted=False
)
cohort_used_in_flags = len([flag for flag in flags if cohort_id in flag.get_cohort_ids()]) > 0
@@ -208,7 +207,7 @@ def validate_filters(self, request_filters: dict):
)
if prop.type == "cohort":
- nested_cohort = Cohort.objects.get(pk=prop.value, team_id=self.context["team_id"])
+ nested_cohort = Cohort.objects.get(pk=prop.value, team__project_id=self.context["project_id"])
dependent_cohorts = get_dependent_cohorts(nested_cohort)
for dependent_cohort in [nested_cohort, *dependent_cohorts]:
if (
@@ -229,7 +228,6 @@ def validate_filters(self, request_filters: dict):
def update(self, cohort: Cohort, validated_data: dict, *args: Any, **kwargs: Any) -> Cohort: # type: ignore
request = self.context["request"]
- user = cast(User, request.user)
cohort.name = validated_data.get("name", cohort.name)
cohort.description = validated_data.get("description", cohort.description)
@@ -240,22 +238,29 @@ def update(self, cohort: Cohort, validated_data: dict, *args: Any, **kwargs: Any
is_deletion_change = deleted_state is not None and cohort.deleted != deleted_state
if is_deletion_change:
+ relevant_team_ids = Team.objects.filter(project_id=cohort.team.project_id).values_list("id", flat=True)
cohort.deleted = deleted_state
if deleted_state:
# De-attach from experiments
cohort.experiment_set.set([])
- AsyncDeletion.objects.get_or_create(
- deletion_type=DeletionType.Cohort_full,
- team_id=cohort.team.pk,
- key=f"{cohort.pk}_{cohort.version}",
- created_by=user,
+ AsyncDeletion.objects.bulk_create(
+ [
+ AsyncDeletion(
+ deletion_type=DeletionType.Cohort_full,
+ team_id=team_id,
+ # Only appending `team_id` if it's not the same as the cohort's `team_id`, so that
+ # the migration to environments does not accidentally cause duplicate `AsyncDeletion`s
+ key=f"{cohort.pk}_{cohort.version}{('_' + str(team_id)) if team_id != cohort.team_id else ''}",
+ )
+ for team_id in relevant_team_ids
+ ],
+ ignore_conflicts=True,
)
else:
AsyncDeletion.objects.filter(
deletion_type=DeletionType.Cohort_full,
- team_id=cohort.team.pk,
- key=f"{cohort.pk}_{cohort.version}",
+ key__startswith=f"{cohort.pk}_{cohort.version}", # We target this _prefix_, so all teams are covered
).delete()
elif not cohort.is_static:
cohort.is_calculating = True
@@ -475,12 +480,12 @@ def perform_update(self, serializer):
class LegacyCohortViewSet(CohortViewSet):
- param_derived_from_user_current_team = "project_id"
+ param_derived_from_user_current_team = "team_id"
def will_create_loops(cohort: Cohort) -> bool:
# Loops can only be formed when trying to update a Cohort, not when creating one
- team_id = cohort.team_id
+ project_id = cohort.team.project_id
# We can model this as a directed graph, where each node is a Cohort and each edge is a reference to another Cohort
# There's a loop only if there's a cycle in the directed graph. The "directed" bit is important.
@@ -501,7 +506,7 @@ def dfs_loop_helper(current_cohort: Cohort, seen_cohorts, cohorts_on_path):
return True
elif property.value not in seen_cohorts:
try:
- nested_cohort = Cohort.objects.get(pk=property.value, team_id=team_id)
+ nested_cohort = Cohort.objects.get(pk=property.value, team__project_id=project_id)
except Cohort.DoesNotExist:
raise ValidationError("Invalid Cohort ID in filter")
@@ -514,23 +519,21 @@ def dfs_loop_helper(current_cohort: Cohort, seen_cohorts, cohorts_on_path):
return dfs_loop_helper(cohort, set(), set())
-def insert_cohort_people_into_pg(cohort: Cohort):
+def insert_cohort_people_into_pg(cohort: Cohort, *, team_id: int):
ids = sync_execute(
- "SELECT person_id FROM {} where team_id = %(team_id)s AND cohort_id = %(cohort_id)s".format(
- PERSON_STATIC_COHORT_TABLE
- ),
- {"cohort_id": cohort.pk, "team_id": cohort.team.pk},
+ f"SELECT person_id FROM {PERSON_STATIC_COHORT_TABLE} where team_id = %(team_id)s AND cohort_id = %(cohort_id)s",
+ {"cohort_id": cohort.pk, "team_id": team_id},
)
- cohort.insert_users_list_by_uuid(items=[str(id[0]) for id in ids])
+ cohort.insert_users_list_by_uuid(items=[str(id[0]) for id in ids], team_id=team_id)
-def insert_cohort_query_actors_into_ch(cohort: Cohort):
- context = HogQLContext(enable_select_queries=True, team_id=cohort.team.pk)
- query = print_cohort_hogql_query(cohort, context)
- insert_actors_into_cohort_by_query(cohort, query, {}, context)
+def insert_cohort_query_actors_into_ch(cohort: Cohort, *, team: Team):
+ context = HogQLContext(enable_select_queries=True, team_id=team.id)
+ query = print_cohort_hogql_query(cohort, context, team=team)
+ insert_actors_into_cohort_by_query(cohort, query, {}, context, team_id=team.id)
-def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
+def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict, *, team_id: int):
from_existing_cohort_id = filter_data.get("from_cohort_id")
context: HogQLContext
@@ -543,7 +546,7 @@ def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
ORDER BY person_id
"""
params = {
- "team_id": cohort.team.pk,
+ "team_id": team_id,
"from_cohort_id": existing_cohort.pk,
"version": existing_cohort.version,
}
@@ -590,48 +593,36 @@ def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: dict):
else:
query, params = query_builder.actor_query(limit_actors=False)
- insert_actors_into_cohort_by_query(cohort, query, params, context)
-
-
-def insert_actors_into_cohort_by_query(cohort: Cohort, query: str, params: dict[str, Any], context: HogQLContext):
- try:
- sync_execute(
- INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID.format(cohort_table=PERSON_STATIC_COHORT_TABLE, query=query),
- {
- "cohort_id": cohort.pk,
- "_timestamp": datetime.now(),
- "team_id": cohort.team.pk,
- **context.values,
- **params,
- },
- )
-
- cohort.is_calculating = False
- cohort.last_calculation = timezone.now()
- cohort.errors_calculating = 0
- cohort.last_error_at = None
- cohort.save(update_fields=["errors_calculating", "last_calculation", "is_calculating", "last_error_at"])
- except Exception as err:
- if settings.DEBUG:
- raise
- cohort.is_calculating = False
- cohort.errors_calculating = F("errors_calculating") + 1
- cohort.last_error_at = timezone.now()
- cohort.save(update_fields=["errors_calculating", "is_calculating", "last_error_at"])
- capture_exception(err)
+ insert_actors_into_cohort_by_query(cohort, query, params, context, team_id=team_id)
+
+
+def insert_actors_into_cohort_by_query(
+ cohort: Cohort, query: str, params: dict[str, Any], context: HogQLContext, *, team_id: int
+):
+ sync_execute(
+ INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID.format(cohort_table=PERSON_STATIC_COHORT_TABLE, query=query),
+ {
+ "cohort_id": cohort.pk,
+ "_timestamp": datetime.now(),
+ "team_id": team_id,
+ **context.values,
+ **params,
+ },
+ )
def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int, batchsize: int = 1_000):
# :TODO: Find a way to incorporate this into the same code path as feature flag evaluation
+ team: Team = Team.objects.get(pk=team_id)
try:
- feature_flag = FeatureFlag.objects.get(team_id=team_id, key=flag)
+ feature_flag = FeatureFlag.objects.get(team__project_id=team.project_id, key=flag)
except FeatureFlag.DoesNotExist:
return []
if not feature_flag.active or feature_flag.deleted or feature_flag.aggregation_group_type_index is not None:
return []
- cohort = Cohort.objects.get(pk=cohort_id, team_id=team_id)
+ cohort = Cohort.objects.get(pk=cohort_id, team__project_id=team.project_id)
matcher_cache = FlagsMatcherCache(team_id)
uuids_to_add_to_cohort = []
cohorts_cache: dict[int, CohortOrEmpty] = {}
@@ -640,7 +631,9 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
# TODO: Consider disabling flags with cohorts for creating static cohorts
# because this is currently a lot more inefficient for flag matching,
# as we're required to go to the database for each person.
- cohorts_cache = {cohort.pk: cohort for cohort in Cohort.objects.filter(team_id=team_id, deleted=False)}
+ cohorts_cache = {
+ cohort.pk: cohort for cohort in Cohort.objects.filter(team__project_id=team.project_id, deleted=False)
+ }
default_person_properties = {}
for condition in feature_flag.conditions:
@@ -727,7 +720,7 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
if len(uuids_to_add_to_cohort) >= batchsize:
cohort.insert_users_list_by_uuid(
- uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize
+ uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize, team_id=team_id
)
uuids_to_add_to_cohort = []
@@ -735,7 +728,9 @@ def get_cohort_actors_for_feature_flag(cohort_id: int, flag: str, team_id: int,
batch_of_persons = queryset[start : start + batchsize]
if len(uuids_to_add_to_cohort) > 0:
- cohort.insert_users_list_by_uuid(uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize)
+ cohort.insert_users_list_by_uuid(
+ uuids_to_add_to_cohort, insert_in_clickhouse=True, batchsize=batchsize, team_id=team_id
+ )
except Exception as err:
if settings.DEBUG or settings.TEST:
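The bulk_create in the cohort deletion path above fans one cohort deletion out to every team (environment) of the project. A minimal sketch of the key scheme, using hypothetical IDs rather than real model instances:

    # Hypothetical values, purely to illustrate the AsyncDeletion key scheme above.
    cohort_pk, cohort_version, cohort_team_id = 42, 3, 1
    relevant_team_ids = [1, 2]  # all teams (environments) in the cohort's project

    keys = [
        # The cohort's own team keeps the legacy key so pre-environments rows are
        # not duplicated; every other team in the project gets a suffixed key.
        f"{cohort_pk}_{cohort_version}{('_' + str(team_id)) if team_id != cohort_team_id else ''}"
        for team_id in relevant_team_ids
    ]
    assert keys == ["42_3", "42_3_2"]

The un-deletion branch then filters on key__startswith=f"{cohort.pk}_{cohort.version}", which matches both the legacy and the suffixed keys, so cancelling a deletion covers every team at once.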
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index 435ccbe1cf27f..25c71d898950c 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -746,7 +746,7 @@ def local_evaluation(self, request: request.Request, **kwargs):
"group_type_mapping": {
str(row.group_type_index): row.group_type
for row in GroupTypeMapping.objects.db_manager(DATABASE_FOR_LOCAL_EVALUATION).filter(
- team_id=self.team_id
+ project_id=self.project_id
)
},
"cohorts": cohorts,
diff --git a/posthog/api/organization.py b/posthog/api/organization.py
index c522ca164c0b9..6fe798479dd7b 100644
--- a/posthog/api/organization.py
+++ b/posthog/api/organization.py
@@ -5,6 +5,8 @@
from django.shortcuts import get_object_or_404
from rest_framework import exceptions, permissions, serializers, viewsets
from rest_framework.request import Request
+from rest_framework.response import Response
+import posthoganalytics
from posthog import settings
from posthog.api.routing import TeamAndOrgViewSetMixin
@@ -12,7 +14,7 @@
from posthog.auth import PersonalAPIKeyAuthentication
from posthog.cloud_utils import is_cloud
from posthog.constants import INTERNAL_BOT_EMAIL_SUFFIX, AvailableFeature
-from posthog.event_usage import report_organization_deleted
+from posthog.event_usage import report_organization_deleted, groups
from posthog.models import Organization, User
from posthog.models.async_deletion import AsyncDeletion, DeletionType
from posthog.rbac.user_access_control import UserAccessControlSerializerMixin
@@ -240,3 +242,24 @@ def get_serializer_context(self) -> dict[str, Any]:
**super().get_serializer_context(),
"user_permissions": UserPermissions(cast(User, self.request.user)),
}
+
+ def update(self, request: Request, *args: Any, **kwargs: Any) -> Response:
+ if "enforce_2fa" in request.data:
+ enforce_2fa_value = request.data["enforce_2fa"]
+ organization = self.get_object()
+ user = cast(User, request.user)
+
+ # Add capture event for 2FA enforcement change
+ posthoganalytics.capture(
+ str(user.distinct_id),
+ "organization 2fa enforcement toggled",
+ properties={
+ "enabled": enforce_2fa_value,
+ "organization_id": str(organization.id),
+ "organization_name": organization.name,
+ "user_role": user.organization_memberships.get(organization=organization).level,
+ },
+ groups=groups(organization),
+ )
+
+ return super().update(request, *args, **kwargs)
diff --git a/posthog/api/project.py b/posthog/api/project.py
index 9bc0d91cec45f..b3b808f520430 100644
--- a/posthog/api/project.py
+++ b/posthog/api/project.py
@@ -191,7 +191,7 @@ def get_effective_membership_level(self, project: Project) -> Optional[Organizat
return self.user_permissions.team(team).effective_membership_level
def get_has_group_types(self, project: Project) -> bool:
- return GroupTypeMapping.objects.filter(team_id=project.id).exists()
+ return GroupTypeMapping.objects.filter(project_id=project.id).exists()
def get_live_events_token(self, project: Project) -> Optional[str]:
team = project.teams.get(pk=project.pk)
diff --git a/posthog/api/team.py b/posthog/api/team.py
index f2486a68fe8a0..b473fc490ec7e 100644
--- a/posthog/api/team.py
+++ b/posthog/api/team.py
@@ -234,7 +234,7 @@ def get_effective_membership_level(self, team: Team) -> Optional[OrganizationMem
return self.user_permissions.team(team).effective_membership_level
def get_has_group_types(self, team: Team) -> bool:
- return GroupTypeMapping.objects.filter(team_id=team.id).exists()
+ return GroupTypeMapping.objects.filter(project_id=team.project_id).exists()
def get_live_events_token(self, team: Team) -> Optional[str]:
return encode_jwt(
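The project.py and team.py hunks above apply the PR's recurring environments change: lookups that were scoped to a single team are now scoped to the whole project. A rough pure-Python sketch of the difference, with hypothetical in-memory records standing in for the Django ORM:

    # Hypothetical group type mappings; each carries both team_id and project_id.
    mappings = [
        {"group_type": "organization", "team_id": 1, "project_id": 10},
        {"group_type": "company", "team_id": 2, "project_id": 10},
    ]

    def has_group_types_team_scoped(team_id: int) -> bool:
        # Old behaviour: only mappings created on this exact environment count.
        return any(m["team_id"] == team_id for m in mappings)

    def has_group_types_project_scoped(project_id: int) -> bool:
        # New behaviour: mappings from any environment of the project count.
        return any(m["project_id"] == project_id for m in mappings)

    assert has_group_types_team_scoped(1) and not has_group_types_team_scoped(3)
    assert has_group_types_project_scoped(10)

The snapshot changes that follow reflect the same shift: the generated SQL now joins through posthog_team and filters on project_id instead of team_id.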
diff --git a/posthog/api/test/__snapshots__/test_cohort.ambr b/posthog/api/test/__snapshots__/test_cohort.ambr
index f1fe8c5d00333..2a4e7cdcc4d86 100644
--- a/posthog/api/test/__snapshots__/test_cohort.ambr
+++ b/posthog/api/test/__snapshots__/test_cohort.ambr
@@ -98,11 +98,14 @@
# name: TestCohort.test_async_deletion_of_cohort.3
'''
/* user_id:0 celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 1
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestCohort.test_async_deletion_of_cohort.4
@@ -163,11 +166,14 @@
# name: TestCohort.test_async_deletion_of_cohort.7
'''
/* user_id:0 celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 2
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestCohort.test_async_deletion_of_cohort.8
diff --git a/posthog/api/test/__snapshots__/test_decide.ambr b/posthog/api/test/__snapshots__/test_decide.ambr
index 56f6257a978d2..7ab6aaee06295 100644
--- a/posthog/api/test/__snapshots__/test_decide.ambr
+++ b/posthog/api/test/__snapshots__/test_decide.ambr
@@ -519,7 +519,7 @@
'''
SELECT 1 AS "a"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
LIMIT 1
'''
# ---
diff --git a/posthog/api/test/__snapshots__/test_feature_flag.ambr b/posthog/api/test/__snapshots__/test_feature_flag.ambr
index b51af7a796f7d..8250f4f667393 100644
--- a/posthog/api/test/__snapshots__/test_feature_flag.ambr
+++ b/posthog/api/test/__snapshots__/test_feature_flag.ambr
@@ -336,6 +336,69 @@
'''
# ---
# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator
+ '''
+ SELECT "posthog_team"."id",
+ "posthog_team"."uuid",
+ "posthog_team"."organization_id",
+ "posthog_team"."project_id",
+ "posthog_team"."api_token",
+ "posthog_team"."app_urls",
+ "posthog_team"."name",
+ "posthog_team"."slack_incoming_webhook",
+ "posthog_team"."created_at",
+ "posthog_team"."updated_at",
+ "posthog_team"."anonymize_ips",
+ "posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
+ "posthog_team"."ingested_event",
+ "posthog_team"."autocapture_opt_out",
+ "posthog_team"."autocapture_web_vitals_opt_in",
+ "posthog_team"."autocapture_web_vitals_allowed_metrics",
+ "posthog_team"."autocapture_exceptions_opt_in",
+ "posthog_team"."autocapture_exceptions_errors_to_ignore",
+ "posthog_team"."person_processing_opt_out",
+ "posthog_team"."session_recording_opt_in",
+ "posthog_team"."session_recording_sample_rate",
+ "posthog_team"."session_recording_minimum_duration_milliseconds",
+ "posthog_team"."session_recording_linked_flag",
+ "posthog_team"."session_recording_network_payload_capture_config",
+ "posthog_team"."session_recording_url_trigger_config",
+ "posthog_team"."session_recording_url_blocklist_config",
+ "posthog_team"."session_recording_event_trigger_config",
+ "posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
+ "posthog_team"."capture_console_log_opt_in",
+ "posthog_team"."capture_performance_opt_in",
+ "posthog_team"."capture_dead_clicks",
+ "posthog_team"."surveys_opt_in",
+ "posthog_team"."heatmaps_opt_in",
+ "posthog_team"."session_recording_version",
+ "posthog_team"."signup_token",
+ "posthog_team"."is_demo",
+ "posthog_team"."access_control",
+ "posthog_team"."week_start_day",
+ "posthog_team"."inject_web_apps",
+ "posthog_team"."test_account_filters",
+ "posthog_team"."test_account_filters_default_checked",
+ "posthog_team"."path_cleaning_filters",
+ "posthog_team"."timezone",
+ "posthog_team"."data_attributes",
+ "posthog_team"."person_display_name_properties",
+ "posthog_team"."live_events_columns",
+ "posthog_team"."recording_domains",
+ "posthog_team"."primary_dashboard_id",
+ "posthog_team"."extra_settings",
+ "posthog_team"."modifiers",
+ "posthog_team"."correlation_config",
+ "posthog_team"."session_recording_retention_period_days",
+ "posthog_team"."external_data_workspace_id",
+ "posthog_team"."external_data_workspace_last_synced_at"
+ FROM "posthog_team"
+ WHERE "posthog_team"."id" = 99999
+ LIMIT 21
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.1
'''
SELECT "posthog_featureflag"."id",
"posthog_featureflag"."key",
@@ -353,12 +416,13 @@
"posthog_featureflag"."usage_dashboard_id",
"posthog_featureflag"."has_enriched_analytics"
FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
WHERE ("posthog_featureflag"."key" = 'some-feature2'
- AND "posthog_featureflag"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.1
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.2
'''
SELECT "posthog_cohort"."id",
"posthog_cohort"."name",
@@ -379,12 +443,13 @@
"posthog_cohort"."is_static",
"posthog_cohort"."groups"
FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.2
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.3
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -405,7 +470,7 @@
LIMIT 2
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.3
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.4
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -425,7 +490,7 @@
5 /* ... */))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.4
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.5
'''
SELECT "posthog_person"."uuid"
FROM "posthog_person"
@@ -440,76 +505,6 @@
LIMIT 1)))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.5
- '''
- SELECT "posthog_team"."id",
- "posthog_team"."uuid",
- "posthog_team"."organization_id",
- "posthog_team"."project_id",
- "posthog_team"."api_token",
- "posthog_team"."app_urls",
- "posthog_team"."name",
- "posthog_team"."slack_incoming_webhook",
- "posthog_team"."created_at",
- "posthog_team"."updated_at",
- "posthog_team"."anonymize_ips",
- "posthog_team"."completed_snippet_onboarding",
- "posthog_team"."has_completed_onboarding_for",
- "posthog_team"."ingested_event",
- "posthog_team"."autocapture_opt_out",
- "posthog_team"."autocapture_web_vitals_opt_in",
- "posthog_team"."autocapture_web_vitals_allowed_metrics",
- "posthog_team"."autocapture_exceptions_opt_in",
- "posthog_team"."autocapture_exceptions_errors_to_ignore",
- "posthog_team"."person_processing_opt_out",
- "posthog_team"."session_recording_opt_in",
- "posthog_team"."session_recording_sample_rate",
- "posthog_team"."session_recording_minimum_duration_milliseconds",
- "posthog_team"."session_recording_linked_flag",
- "posthog_team"."session_recording_network_payload_capture_config",
- "posthog_team"."session_recording_url_trigger_config",
- "posthog_team"."session_recording_url_blocklist_config",
- "posthog_team"."session_recording_event_trigger_config",
- "posthog_team"."session_replay_config",
- "posthog_team"."survey_config",
- "posthog_team"."capture_console_log_opt_in",
- "posthog_team"."capture_performance_opt_in",
- "posthog_team"."capture_dead_clicks",
- "posthog_team"."surveys_opt_in",
- "posthog_team"."heatmaps_opt_in",
- "posthog_team"."session_recording_version",
- "posthog_team"."signup_token",
- "posthog_team"."is_demo",
- "posthog_team"."access_control",
- "posthog_team"."week_start_day",
- "posthog_team"."inject_web_apps",
- "posthog_team"."test_account_filters",
- "posthog_team"."test_account_filters_default_checked",
- "posthog_team"."path_cleaning_filters",
- "posthog_team"."timezone",
- "posthog_team"."data_attributes",
- "posthog_team"."person_display_name_properties",
- "posthog_team"."live_events_columns",
- "posthog_team"."recording_domains",
- "posthog_team"."primary_dashboard_id",
- "posthog_team"."extra_settings",
- "posthog_team"."modifiers",
- "posthog_team"."correlation_config",
- "posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
- "posthog_team"."external_data_workspace_id",
- "posthog_team"."external_data_workspace_last_synced_at"
- FROM "posthog_team"
- WHERE "posthog_team"."id" = 99999
- LIMIT 21
- '''
-# ---
# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_iterator.6
'''
SELECT "posthog_person"."id",
@@ -589,72 +584,6 @@
'''
# ---
# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too
- '''
- SELECT "posthog_featureflag"."id",
- "posthog_featureflag"."key",
- "posthog_featureflag"."name",
- "posthog_featureflag"."filters",
- "posthog_featureflag"."rollout_percentage",
- "posthog_featureflag"."team_id",
- "posthog_featureflag"."created_by_id",
- "posthog_featureflag"."created_at",
- "posthog_featureflag"."deleted",
- "posthog_featureflag"."active",
- "posthog_featureflag"."rollback_conditions",
- "posthog_featureflag"."performed_rollback",
- "posthog_featureflag"."ensure_experience_continuity",
- "posthog_featureflag"."usage_dashboard_id",
- "posthog_featureflag"."has_enriched_analytics"
- FROM "posthog_featureflag"
- WHERE ("posthog_featureflag"."key" = 'some-feature-new'
- AND "posthog_featureflag"."team_id" = 99999)
- LIMIT 21
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.1
- '''
- SELECT "posthog_cohort"."id",
- "posthog_cohort"."name",
- "posthog_cohort"."description",
- "posthog_cohort"."team_id",
- "posthog_cohort"."deleted",
- "posthog_cohort"."filters",
- "posthog_cohort"."query",
- "posthog_cohort"."version",
- "posthog_cohort"."pending_version",
- "posthog_cohort"."count",
- "posthog_cohort"."created_by_id",
- "posthog_cohort"."created_at",
- "posthog_cohort"."is_calculating",
- "posthog_cohort"."last_calculation",
- "posthog_cohort"."errors_calculating",
- "posthog_cohort"."last_error_at",
- "posthog_cohort"."is_static",
- "posthog_cohort"."groups"
- FROM "posthog_cohort"
- WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
- LIMIT 21
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.10
- '''
- SELECT "posthog_person"."uuid"
- FROM "posthog_person"
- WHERE ("posthog_person"."team_id" = 99999
- AND "posthog_person"."uuid" IN ('00000000000040008000000000000000'::uuid,
- '00000000000040008000000000000001'::uuid,
- '00000000000040008000000000000002'::uuid,
- '00000000000040008000000000000003'::uuid)
- AND NOT (EXISTS
- (SELECT 1 AS "a"
- FROM "posthog_cohortpeople" U1
- WHERE (U1."cohort_id" = 99999
- AND U1."person_id" = ("posthog_person"."id"))
- LIMIT 1)))
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.11
'''
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -710,13 +639,6 @@
"posthog_team"."modifiers",
"posthog_team"."correlation_config",
"posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
"posthog_team"."external_data_workspace_id",
"posthog_team"."external_data_workspace_last_synced_at"
FROM "posthog_team"
@@ -724,32 +646,31 @@
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.2
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.1
'''
- SELECT "posthog_cohort"."id",
- "posthog_cohort"."name",
- "posthog_cohort"."description",
- "posthog_cohort"."team_id",
- "posthog_cohort"."deleted",
- "posthog_cohort"."filters",
- "posthog_cohort"."query",
- "posthog_cohort"."version",
- "posthog_cohort"."pending_version",
- "posthog_cohort"."count",
- "posthog_cohort"."created_by_id",
- "posthog_cohort"."created_at",
- "posthog_cohort"."is_calculating",
- "posthog_cohort"."last_calculation",
- "posthog_cohort"."errors_calculating",
- "posthog_cohort"."last_error_at",
- "posthog_cohort"."is_static",
- "posthog_cohort"."groups"
- FROM "posthog_cohort"
- WHERE (NOT "posthog_cohort"."deleted"
- AND "posthog_cohort"."team_id" = 99999)
+ SELECT "posthog_featureflag"."id",
+ "posthog_featureflag"."key",
+ "posthog_featureflag"."name",
+ "posthog_featureflag"."filters",
+ "posthog_featureflag"."rollout_percentage",
+ "posthog_featureflag"."team_id",
+ "posthog_featureflag"."created_by_id",
+ "posthog_featureflag"."created_at",
+ "posthog_featureflag"."deleted",
+ "posthog_featureflag"."active",
+ "posthog_featureflag"."rollback_conditions",
+ "posthog_featureflag"."performed_rollback",
+ "posthog_featureflag"."ensure_experience_continuity",
+ "posthog_featureflag"."usage_dashboard_id",
+ "posthog_featureflag"."has_enriched_analytics"
+ FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
+ WHERE ("posthog_featureflag"."key" = 'some-feature-new'
+ AND "posthog_team"."project_id" = 99999)
+ LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.3
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.10
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -786,9 +707,119 @@
AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb))))
ORDER BY "posthog_person"."id" ASC
LIMIT 1000
+ OFFSET 1000
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.11
+ '''
+ SELECT "posthog_person"."uuid"
+ FROM "posthog_person"
+ WHERE ("posthog_person"."team_id" = 99999
+ AND "posthog_person"."uuid" IN ('00000000000040008000000000000000'::uuid,
+ '00000000000040008000000000000001'::uuid,
+ '00000000000040008000000000000002'::uuid,
+ '00000000000040008000000000000003'::uuid)
+ AND NOT (EXISTS
+ (SELECT 1 AS "a"
+ FROM "posthog_cohortpeople" U1
+ WHERE (U1."cohort_id" = 99999
+ AND U1."person_id" = ("posthog_person"."id"))
+ LIMIT 1)))
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.2
+ '''
+ SELECT "posthog_cohort"."id",
+ "posthog_cohort"."name",
+ "posthog_cohort"."description",
+ "posthog_cohort"."team_id",
+ "posthog_cohort"."deleted",
+ "posthog_cohort"."filters",
+ "posthog_cohort"."query",
+ "posthog_cohort"."version",
+ "posthog_cohort"."pending_version",
+ "posthog_cohort"."count",
+ "posthog_cohort"."created_by_id",
+ "posthog_cohort"."created_at",
+ "posthog_cohort"."is_calculating",
+ "posthog_cohort"."last_calculation",
+ "posthog_cohort"."errors_calculating",
+ "posthog_cohort"."last_error_at",
+ "posthog_cohort"."is_static",
+ "posthog_cohort"."groups"
+ FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
+ WHERE ("posthog_cohort"."id" = 99999
+ AND "posthog_team"."project_id" = 99999)
+ LIMIT 21
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.3
+ '''
+ SELECT "posthog_cohort"."id",
+ "posthog_cohort"."name",
+ "posthog_cohort"."description",
+ "posthog_cohort"."team_id",
+ "posthog_cohort"."deleted",
+ "posthog_cohort"."filters",
+ "posthog_cohort"."query",
+ "posthog_cohort"."version",
+ "posthog_cohort"."pending_version",
+ "posthog_cohort"."count",
+ "posthog_cohort"."created_by_id",
+ "posthog_cohort"."created_at",
+ "posthog_cohort"."is_calculating",
+ "posthog_cohort"."last_calculation",
+ "posthog_cohort"."errors_calculating",
+ "posthog_cohort"."last_error_at",
+ "posthog_cohort"."is_static",
+ "posthog_cohort"."groups"
+ FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
+ WHERE (NOT "posthog_cohort"."deleted"
+ AND "posthog_team"."project_id" = 99999)
'''
# ---
# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.4
+ '''
+ SELECT "posthog_person"."id",
+ "posthog_person"."created_at",
+ "posthog_person"."properties_last_updated_at",
+ "posthog_person"."properties_last_operation",
+ "posthog_person"."team_id",
+ "posthog_person"."properties",
+ "posthog_person"."is_user_id",
+ "posthog_person"."is_identified",
+ "posthog_person"."uuid",
+ "posthog_person"."version"
+ FROM "posthog_person"
+ WHERE ("posthog_person"."team_id" = 99999
+ AND ((("posthog_person"."properties" -> 'group') = '"none"'::jsonb
+ AND "posthog_person"."properties" ? 'group'
+ AND NOT (("posthog_person"."properties" -> 'group') = 'null'::jsonb))
+ OR (("posthog_person"."properties" -> 'group2') IN ('1'::jsonb,
+ '2'::jsonb,
+ '3'::jsonb)
+ AND "posthog_person"."properties" ? 'group2'
+ AND NOT (("posthog_person"."properties" -> 'group2') = 'null'::jsonb))
+ OR EXISTS
+ (SELECT 1 AS "a"
+ FROM "posthog_cohortpeople" U0
+ WHERE (U0."cohort_id" = 99999
+ AND U0."cohort_id" = 99999
+ AND U0."person_id" = ("posthog_person"."id"))
+ LIMIT 1)
+ OR (("posthog_person"."properties" -> 'does-not-exist') = '"none"'::jsonb
+ AND "posthog_person"."properties" ? 'does-not-exist'
+ AND NOT (("posthog_person"."properties" -> 'does-not-exist') = 'null'::jsonb))
+ OR (("posthog_person"."properties" -> 'key') = '"value"'::jsonb
+ AND "posthog_person"."properties" ? 'key'
+ AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb))))
+ ORDER BY "posthog_person"."id" ASC
+ LIMIT 1000
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.5
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -808,7 +839,7 @@
5 /* ... */))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.5
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.6
'''
SELECT ("posthog_person"."id" IS NULL
OR "posthog_person"."id" IS NULL
@@ -827,7 +858,7 @@
AND "posthog_person"."team_id" = 99999)
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.6
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.7
'''
SELECT ("posthog_person"."id" IS NOT NULL
OR "posthog_person"."id" IS NULL
@@ -846,7 +877,7 @@
AND "posthog_person"."team_id" = 99999)
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.7
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.8
'''
SELECT ("posthog_person"."id" IS NULL
OR "posthog_person"."id" IS NOT NULL
@@ -865,7 +896,7 @@
AND "posthog_person"."team_id" = 99999)
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.8
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.9
'''
SELECT ("posthog_person"."id" IS NULL
OR "posthog_person"."id" IS NULL
@@ -884,47 +915,70 @@
AND "posthog_person"."team_id" = 99999)
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_cohort_flag_adds_cohort_props_as_default_too.9
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment
'''
- SELECT "posthog_person"."id",
- "posthog_person"."created_at",
- "posthog_person"."properties_last_updated_at",
- "posthog_person"."properties_last_operation",
- "posthog_person"."team_id",
- "posthog_person"."properties",
- "posthog_person"."is_user_id",
- "posthog_person"."is_identified",
- "posthog_person"."uuid",
- "posthog_person"."version"
- FROM "posthog_person"
- WHERE ("posthog_person"."team_id" = 99999
- AND ((("posthog_person"."properties" -> 'group') = '"none"'::jsonb
- AND "posthog_person"."properties" ? 'group'
- AND NOT (("posthog_person"."properties" -> 'group') = 'null'::jsonb))
- OR (("posthog_person"."properties" -> 'group2') IN ('1'::jsonb,
- '2'::jsonb,
- '3'::jsonb)
- AND "posthog_person"."properties" ? 'group2'
- AND NOT (("posthog_person"."properties" -> 'group2') = 'null'::jsonb))
- OR EXISTS
- (SELECT 1 AS "a"
- FROM "posthog_cohortpeople" U0
- WHERE (U0."cohort_id" = 99999
- AND U0."cohort_id" = 99999
- AND U0."person_id" = ("posthog_person"."id"))
- LIMIT 1)
- OR (("posthog_person"."properties" -> 'does-not-exist') = '"none"'::jsonb
- AND "posthog_person"."properties" ? 'does-not-exist'
- AND NOT (("posthog_person"."properties" -> 'does-not-exist') = 'null'::jsonb))
- OR (("posthog_person"."properties" -> 'key') = '"value"'::jsonb
- AND "posthog_person"."properties" ? 'key'
- AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb))))
- ORDER BY "posthog_person"."id" ASC
- LIMIT 1000
- OFFSET 1000
+ SELECT "posthog_team"."id",
+ "posthog_team"."uuid",
+ "posthog_team"."organization_id",
+ "posthog_team"."project_id",
+ "posthog_team"."api_token",
+ "posthog_team"."app_urls",
+ "posthog_team"."name",
+ "posthog_team"."slack_incoming_webhook",
+ "posthog_team"."created_at",
+ "posthog_team"."updated_at",
+ "posthog_team"."anonymize_ips",
+ "posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
+ "posthog_team"."ingested_event",
+ "posthog_team"."autocapture_opt_out",
+ "posthog_team"."autocapture_web_vitals_opt_in",
+ "posthog_team"."autocapture_web_vitals_allowed_metrics",
+ "posthog_team"."autocapture_exceptions_opt_in",
+ "posthog_team"."autocapture_exceptions_errors_to_ignore",
+ "posthog_team"."person_processing_opt_out",
+ "posthog_team"."session_recording_opt_in",
+ "posthog_team"."session_recording_sample_rate",
+ "posthog_team"."session_recording_minimum_duration_milliseconds",
+ "posthog_team"."session_recording_linked_flag",
+ "posthog_team"."session_recording_network_payload_capture_config",
+ "posthog_team"."session_recording_url_trigger_config",
+ "posthog_team"."session_recording_url_blocklist_config",
+ "posthog_team"."session_recording_event_trigger_config",
+ "posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
+ "posthog_team"."capture_console_log_opt_in",
+ "posthog_team"."capture_performance_opt_in",
+ "posthog_team"."capture_dead_clicks",
+ "posthog_team"."surveys_opt_in",
+ "posthog_team"."heatmaps_opt_in",
+ "posthog_team"."session_recording_version",
+ "posthog_team"."signup_token",
+ "posthog_team"."is_demo",
+ "posthog_team"."access_control",
+ "posthog_team"."week_start_day",
+ "posthog_team"."inject_web_apps",
+ "posthog_team"."test_account_filters",
+ "posthog_team"."test_account_filters_default_checked",
+ "posthog_team"."path_cleaning_filters",
+ "posthog_team"."timezone",
+ "posthog_team"."data_attributes",
+ "posthog_team"."person_display_name_properties",
+ "posthog_team"."live_events_columns",
+ "posthog_team"."recording_domains",
+ "posthog_team"."primary_dashboard_id",
+ "posthog_team"."extra_settings",
+ "posthog_team"."modifiers",
+ "posthog_team"."correlation_config",
+ "posthog_team"."session_recording_retention_period_days",
+ "posthog_team"."external_data_workspace_id",
+ "posthog_team"."external_data_workspace_last_synced_at"
+ FROM "posthog_team"
+ WHERE "posthog_team"."id" = 99999
+ LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.1
'''
SELECT "posthog_featureflag"."id",
"posthog_featureflag"."key",
@@ -942,38 +996,32 @@
"posthog_featureflag"."usage_dashboard_id",
"posthog_featureflag"."has_enriched_analytics"
FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
WHERE ("posthog_featureflag"."key" = 'some-feature2'
- AND "posthog_featureflag"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.1
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.10
'''
- SELECT "posthog_cohort"."id",
- "posthog_cohort"."name",
- "posthog_cohort"."description",
- "posthog_cohort"."team_id",
- "posthog_cohort"."deleted",
- "posthog_cohort"."filters",
- "posthog_cohort"."query",
- "posthog_cohort"."version",
- "posthog_cohort"."pending_version",
- "posthog_cohort"."count",
- "posthog_cohort"."created_by_id",
- "posthog_cohort"."created_at",
- "posthog_cohort"."is_calculating",
- "posthog_cohort"."last_calculation",
- "posthog_cohort"."errors_calculating",
- "posthog_cohort"."last_error_at",
- "posthog_cohort"."is_static",
- "posthog_cohort"."groups"
- FROM "posthog_cohort"
- WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
- LIMIT 21
+ SELECT "posthog_person"."id",
+ "posthog_person"."created_at",
+ "posthog_person"."properties_last_updated_at",
+ "posthog_person"."properties_last_operation",
+ "posthog_person"."team_id",
+ "posthog_person"."properties",
+ "posthog_person"."is_user_id",
+ "posthog_person"."is_identified",
+ "posthog_person"."uuid",
+ "posthog_person"."version"
+ FROM "posthog_person"
+ WHERE ("posthog_person"."team_id" = 99999
+ AND ("posthog_person"."properties" -> 'key') IS NOT NULL)
+ ORDER BY "posthog_person"."id" ASC
+ LIMIT 1000
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.10
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.11
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -993,7 +1041,7 @@
5 /* ... */))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.11
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.12
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1013,7 +1061,7 @@
OFFSET 1000
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.12
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.13
'''
SELECT "posthog_person"."uuid"
FROM "posthog_person"
@@ -1028,77 +1076,34 @@
LIMIT 1)))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.13
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.2
'''
- SELECT "posthog_team"."id",
- "posthog_team"."uuid",
- "posthog_team"."organization_id",
- "posthog_team"."project_id",
- "posthog_team"."api_token",
- "posthog_team"."app_urls",
- "posthog_team"."name",
- "posthog_team"."slack_incoming_webhook",
- "posthog_team"."created_at",
- "posthog_team"."updated_at",
- "posthog_team"."anonymize_ips",
- "posthog_team"."completed_snippet_onboarding",
- "posthog_team"."has_completed_onboarding_for",
- "posthog_team"."ingested_event",
- "posthog_team"."autocapture_opt_out",
- "posthog_team"."autocapture_web_vitals_opt_in",
- "posthog_team"."autocapture_web_vitals_allowed_metrics",
- "posthog_team"."autocapture_exceptions_opt_in",
- "posthog_team"."autocapture_exceptions_errors_to_ignore",
- "posthog_team"."person_processing_opt_out",
- "posthog_team"."session_recording_opt_in",
- "posthog_team"."session_recording_sample_rate",
- "posthog_team"."session_recording_minimum_duration_milliseconds",
- "posthog_team"."session_recording_linked_flag",
- "posthog_team"."session_recording_network_payload_capture_config",
- "posthog_team"."session_recording_url_trigger_config",
- "posthog_team"."session_recording_url_blocklist_config",
- "posthog_team"."session_recording_event_trigger_config",
- "posthog_team"."session_replay_config",
- "posthog_team"."survey_config",
- "posthog_team"."capture_console_log_opt_in",
- "posthog_team"."capture_performance_opt_in",
- "posthog_team"."capture_dead_clicks",
- "posthog_team"."surveys_opt_in",
- "posthog_team"."heatmaps_opt_in",
- "posthog_team"."session_recording_version",
- "posthog_team"."signup_token",
- "posthog_team"."is_demo",
- "posthog_team"."access_control",
- "posthog_team"."week_start_day",
- "posthog_team"."inject_web_apps",
- "posthog_team"."test_account_filters",
- "posthog_team"."test_account_filters_default_checked",
- "posthog_team"."path_cleaning_filters",
- "posthog_team"."timezone",
- "posthog_team"."data_attributes",
- "posthog_team"."person_display_name_properties",
- "posthog_team"."live_events_columns",
- "posthog_team"."recording_domains",
- "posthog_team"."primary_dashboard_id",
- "posthog_team"."extra_settings",
- "posthog_team"."modifiers",
- "posthog_team"."correlation_config",
- "posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
- "posthog_team"."external_data_workspace_id",
- "posthog_team"."external_data_workspace_last_synced_at"
- FROM "posthog_team"
- WHERE "posthog_team"."id" = 99999
+ SELECT "posthog_cohort"."id",
+ "posthog_cohort"."name",
+ "posthog_cohort"."description",
+ "posthog_cohort"."team_id",
+ "posthog_cohort"."deleted",
+ "posthog_cohort"."filters",
+ "posthog_cohort"."query",
+ "posthog_cohort"."version",
+ "posthog_cohort"."pending_version",
+ "posthog_cohort"."count",
+ "posthog_cohort"."created_by_id",
+ "posthog_cohort"."created_at",
+ "posthog_cohort"."is_calculating",
+ "posthog_cohort"."last_calculation",
+ "posthog_cohort"."errors_calculating",
+ "posthog_cohort"."last_error_at",
+ "posthog_cohort"."is_static",
+ "posthog_cohort"."groups"
+ FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
+ WHERE ("posthog_cohort"."id" = 99999
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.2
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.3
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1119,7 +1124,7 @@
LIMIT 1000
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.3
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.4
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1139,7 +1144,7 @@
5 /* ... */))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.4
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.5
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1161,7 +1166,7 @@
OFFSET 1000
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.5
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.6
'''
SELECT "posthog_person"."uuid"
FROM "posthog_person"
@@ -1175,7 +1180,7 @@
LIMIT 1)))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.6
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.7
'''
SELECT "posthog_team"."id",
"posthog_team"."uuid",
@@ -1231,13 +1236,6 @@
"posthog_team"."modifiers",
"posthog_team"."correlation_config",
"posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
"posthog_team"."external_data_workspace_id",
"posthog_team"."external_data_workspace_last_synced_at"
FROM "posthog_team"
@@ -1245,7 +1243,7 @@
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.7
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.8
'''
SELECT "posthog_featureflag"."id",
"posthog_featureflag"."key",
@@ -1263,12 +1261,13 @@
"posthog_featureflag"."usage_dashboard_id",
"posthog_featureflag"."has_enriched_analytics"
FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
WHERE ("posthog_featureflag"."key" = 'some-feature-new'
- AND "posthog_featureflag"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.8
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.9
'''
SELECT "posthog_cohort"."id",
"posthog_cohort"."name",
@@ -1289,31 +1288,76 @@
"posthog_cohort"."is_static",
"posthog_cohort"."groups"
FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_default_person_properties_adjustment.9
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag
'''
- SELECT "posthog_person"."id",
- "posthog_person"."created_at",
- "posthog_person"."properties_last_updated_at",
- "posthog_person"."properties_last_operation",
- "posthog_person"."team_id",
- "posthog_person"."properties",
- "posthog_person"."is_user_id",
- "posthog_person"."is_identified",
- "posthog_person"."uuid",
- "posthog_person"."version"
- FROM "posthog_person"
- WHERE ("posthog_person"."team_id" = 99999
- AND ("posthog_person"."properties" -> 'key') IS NOT NULL)
- ORDER BY "posthog_person"."id" ASC
- LIMIT 1000
+ SELECT "posthog_team"."id",
+ "posthog_team"."uuid",
+ "posthog_team"."organization_id",
+ "posthog_team"."project_id",
+ "posthog_team"."api_token",
+ "posthog_team"."app_urls",
+ "posthog_team"."name",
+ "posthog_team"."slack_incoming_webhook",
+ "posthog_team"."created_at",
+ "posthog_team"."updated_at",
+ "posthog_team"."anonymize_ips",
+ "posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
+ "posthog_team"."ingested_event",
+ "posthog_team"."autocapture_opt_out",
+ "posthog_team"."autocapture_web_vitals_opt_in",
+ "posthog_team"."autocapture_web_vitals_allowed_metrics",
+ "posthog_team"."autocapture_exceptions_opt_in",
+ "posthog_team"."autocapture_exceptions_errors_to_ignore",
+ "posthog_team"."person_processing_opt_out",
+ "posthog_team"."session_recording_opt_in",
+ "posthog_team"."session_recording_sample_rate",
+ "posthog_team"."session_recording_minimum_duration_milliseconds",
+ "posthog_team"."session_recording_linked_flag",
+ "posthog_team"."session_recording_network_payload_capture_config",
+ "posthog_team"."session_recording_url_trigger_config",
+ "posthog_team"."session_recording_url_blocklist_config",
+ "posthog_team"."session_recording_event_trigger_config",
+ "posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
+ "posthog_team"."capture_console_log_opt_in",
+ "posthog_team"."capture_performance_opt_in",
+ "posthog_team"."capture_dead_clicks",
+ "posthog_team"."surveys_opt_in",
+ "posthog_team"."heatmaps_opt_in",
+ "posthog_team"."session_recording_version",
+ "posthog_team"."signup_token",
+ "posthog_team"."is_demo",
+ "posthog_team"."access_control",
+ "posthog_team"."week_start_day",
+ "posthog_team"."inject_web_apps",
+ "posthog_team"."test_account_filters",
+ "posthog_team"."test_account_filters_default_checked",
+ "posthog_team"."path_cleaning_filters",
+ "posthog_team"."timezone",
+ "posthog_team"."data_attributes",
+ "posthog_team"."person_display_name_properties",
+ "posthog_team"."live_events_columns",
+ "posthog_team"."recording_domains",
+ "posthog_team"."primary_dashboard_id",
+ "posthog_team"."extra_settings",
+ "posthog_team"."modifiers",
+ "posthog_team"."correlation_config",
+ "posthog_team"."session_recording_retention_period_days",
+ "posthog_team"."external_data_workspace_id",
+ "posthog_team"."external_data_workspace_last_synced_at"
+ FROM "posthog_team"
+ WHERE "posthog_team"."id" = 99999
+ LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.1
'''
SELECT "posthog_featureflag"."id",
"posthog_featureflag"."key",
@@ -1331,12 +1375,13 @@
"posthog_featureflag"."usage_dashboard_id",
"posthog_featureflag"."has_enriched_analytics"
FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
WHERE ("posthog_featureflag"."key" = 'some-feature2'
- AND "posthog_featureflag"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.1
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.2
'''
SELECT "posthog_cohort"."id",
"posthog_cohort"."name",
@@ -1357,12 +1402,13 @@
"posthog_cohort"."is_static",
"posthog_cohort"."groups"
FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.2
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.3
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1383,7 +1429,7 @@
LIMIT 1000
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.3
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.4
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1403,20 +1449,6 @@
5 /* ... */))
'''
# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.4
- '''
- SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key",
- "posthog_featureflaghashkeyoverride"."hash_key",
- "posthog_featureflaghashkeyoverride"."person_id"
- FROM "posthog_featureflaghashkeyoverride"
- WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1,
- 2,
- 3,
- 4,
- 5 /* ... */)
- AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
- '''
-# ---
# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.5
'''
SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key",
@@ -1427,128 +1459,72 @@
2,
3,
4,
- 5 /* ... */)
- AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.6
- '''
- SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key",
- "posthog_featureflaghashkeyoverride"."hash_key",
- "posthog_featureflaghashkeyoverride"."person_id"
- FROM "posthog_featureflaghashkeyoverride"
- WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1,
- 2,
- 3,
- 4,
- 5 /* ... */)
- AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.7
- '''
- SELECT "posthog_person"."id",
- "posthog_person"."created_at",
- "posthog_person"."properties_last_updated_at",
- "posthog_person"."properties_last_operation",
- "posthog_person"."team_id",
- "posthog_person"."properties",
- "posthog_person"."is_user_id",
- "posthog_person"."is_identified",
- "posthog_person"."uuid",
- "posthog_person"."version"
- FROM "posthog_person"
- WHERE ("posthog_person"."team_id" = 99999
- AND ("posthog_person"."properties" -> 'key') = '"value"'::jsonb
- AND "posthog_person"."properties" ? 'key'
- AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb))
- ORDER BY "posthog_person"."id" ASC
- LIMIT 1000
- OFFSET 1000
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.8
- '''
- SELECT "posthog_person"."uuid"
- FROM "posthog_person"
- WHERE ("posthog_person"."team_id" = 99999
- AND "posthog_person"."uuid" IN ('00000000000040008000000000000002'::uuid)
- AND NOT (EXISTS
- (SELECT 1 AS "a"
- FROM "posthog_cohortpeople" U1
- WHERE (U1."cohort_id" = 99999
- AND U1."person_id" = ("posthog_person"."id"))
- LIMIT 1)))
- '''
-# ---
-# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.9
- '''
- SELECT "posthog_team"."id",
- "posthog_team"."uuid",
- "posthog_team"."organization_id",
- "posthog_team"."project_id",
- "posthog_team"."api_token",
- "posthog_team"."app_urls",
- "posthog_team"."name",
- "posthog_team"."slack_incoming_webhook",
- "posthog_team"."created_at",
- "posthog_team"."updated_at",
- "posthog_team"."anonymize_ips",
- "posthog_team"."completed_snippet_onboarding",
- "posthog_team"."has_completed_onboarding_for",
- "posthog_team"."ingested_event",
- "posthog_team"."autocapture_opt_out",
- "posthog_team"."autocapture_web_vitals_opt_in",
- "posthog_team"."autocapture_web_vitals_allowed_metrics",
- "posthog_team"."autocapture_exceptions_opt_in",
- "posthog_team"."autocapture_exceptions_errors_to_ignore",
- "posthog_team"."person_processing_opt_out",
- "posthog_team"."session_recording_opt_in",
- "posthog_team"."session_recording_sample_rate",
- "posthog_team"."session_recording_minimum_duration_milliseconds",
- "posthog_team"."session_recording_linked_flag",
- "posthog_team"."session_recording_network_payload_capture_config",
- "posthog_team"."session_recording_url_trigger_config",
- "posthog_team"."session_recording_url_blocklist_config",
- "posthog_team"."session_recording_event_trigger_config",
- "posthog_team"."session_replay_config",
- "posthog_team"."survey_config",
- "posthog_team"."capture_console_log_opt_in",
- "posthog_team"."capture_performance_opt_in",
- "posthog_team"."capture_dead_clicks",
- "posthog_team"."surveys_opt_in",
- "posthog_team"."heatmaps_opt_in",
- "posthog_team"."session_recording_version",
- "posthog_team"."signup_token",
- "posthog_team"."is_demo",
- "posthog_team"."access_control",
- "posthog_team"."week_start_day",
- "posthog_team"."inject_web_apps",
- "posthog_team"."test_account_filters",
- "posthog_team"."test_account_filters_default_checked",
- "posthog_team"."path_cleaning_filters",
- "posthog_team"."timezone",
- "posthog_team"."data_attributes",
- "posthog_team"."person_display_name_properties",
- "posthog_team"."live_events_columns",
- "posthog_team"."recording_domains",
- "posthog_team"."primary_dashboard_id",
- "posthog_team"."extra_settings",
- "posthog_team"."modifiers",
- "posthog_team"."correlation_config",
- "posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
- "posthog_team"."external_data_workspace_id",
- "posthog_team"."external_data_workspace_last_synced_at"
- FROM "posthog_team"
- WHERE "posthog_team"."id" = 99999
- LIMIT 21
+ 5 /* ... */)
+ AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.6
+ '''
+ SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key",
+ "posthog_featureflaghashkeyoverride"."hash_key",
+ "posthog_featureflaghashkeyoverride"."person_id"
+ FROM "posthog_featureflaghashkeyoverride"
+ WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1,
+ 2,
+ 3,
+ 4,
+ 5 /* ... */)
+ AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.7
+ '''
+ SELECT "posthog_featureflaghashkeyoverride"."feature_flag_key",
+ "posthog_featureflaghashkeyoverride"."hash_key",
+ "posthog_featureflaghashkeyoverride"."person_id"
+ FROM "posthog_featureflaghashkeyoverride"
+ WHERE ("posthog_featureflaghashkeyoverride"."person_id" IN (1,
+ 2,
+ 3,
+ 4,
+ 5 /* ... */)
+ AND "posthog_featureflaghashkeyoverride"."team_id" = 99999)
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.8
+ '''
+ SELECT "posthog_person"."id",
+ "posthog_person"."created_at",
+ "posthog_person"."properties_last_updated_at",
+ "posthog_person"."properties_last_operation",
+ "posthog_person"."team_id",
+ "posthog_person"."properties",
+ "posthog_person"."is_user_id",
+ "posthog_person"."is_identified",
+ "posthog_person"."uuid",
+ "posthog_person"."version"
+ FROM "posthog_person"
+ WHERE ("posthog_person"."team_id" = 99999
+ AND ("posthog_person"."properties" -> 'key') = '"value"'::jsonb
+ AND "posthog_person"."properties" ? 'key'
+ AND NOT (("posthog_person"."properties" -> 'key') = 'null'::jsonb))
+ ORDER BY "posthog_person"."id" ASC
+ LIMIT 1000
+ OFFSET 1000
+ '''
+# ---
+# name: TestCohortGenerationForFeatureFlag.test_creating_static_cohort_with_experience_continuity_flag.9
+ '''
+ SELECT "posthog_person"."uuid"
+ FROM "posthog_person"
+ WHERE ("posthog_person"."team_id" = 99999
+ AND "posthog_person"."uuid" IN ('00000000000040008000000000000002'::uuid)
+ AND NOT (EXISTS
+ (SELECT 1 AS "a"
+ FROM "posthog_cohortpeople" U1
+ WHERE (U1."cohort_id" = 99999
+ AND U1."person_id" = ("posthog_person"."id"))
+ LIMIT 1)))
'''
# ---
# name: TestFeatureFlag.test_cant_create_flag_with_data_that_fails_to_query
@@ -1668,6 +1644,33 @@
'''
# ---
# name: TestFeatureFlag.test_creating_static_cohort.10
+ '''
+ SELECT "posthog_cohort"."id",
+ "posthog_cohort"."name",
+ "posthog_cohort"."description",
+ "posthog_cohort"."team_id",
+ "posthog_cohort"."deleted",
+ "posthog_cohort"."filters",
+ "posthog_cohort"."query",
+ "posthog_cohort"."version",
+ "posthog_cohort"."pending_version",
+ "posthog_cohort"."count",
+ "posthog_cohort"."created_by_id",
+ "posthog_cohort"."created_at",
+ "posthog_cohort"."is_calculating",
+ "posthog_cohort"."last_calculation",
+ "posthog_cohort"."errors_calculating",
+ "posthog_cohort"."last_error_at",
+ "posthog_cohort"."is_static",
+ "posthog_cohort"."groups"
+ FROM "posthog_cohort"
+ INNER JOIN "posthog_team" ON ("posthog_cohort"."team_id" = "posthog_team"."id")
+ WHERE ("posthog_cohort"."id" = 99999
+ AND "posthog_team"."project_id" = 99999)
+ LIMIT 21
+ '''
+# ---
+# name: TestFeatureFlag.test_creating_static_cohort.11
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1688,7 +1691,7 @@
LIMIT 10000
'''
# ---
-# name: TestFeatureFlag.test_creating_static_cohort.11
+# name: TestFeatureFlag.test_creating_static_cohort.12
'''
SELECT "posthog_persondistinctid"."id",
"posthog_persondistinctid"."team_id",
@@ -1708,7 +1711,7 @@
5 /* ... */))
'''
# ---
-# name: TestFeatureFlag.test_creating_static_cohort.12
+# name: TestFeatureFlag.test_creating_static_cohort.13
'''
SELECT "posthog_person"."id",
"posthog_person"."created_at",
@@ -1730,7 +1733,7 @@
OFFSET 10000
'''
# ---
-# name: TestFeatureFlag.test_creating_static_cohort.13
+# name: TestFeatureFlag.test_creating_static_cohort.14
'''
SELECT "posthog_person"."uuid"
FROM "posthog_person"
@@ -1744,76 +1747,6 @@
LIMIT 1)))
'''
# ---
-# name: TestFeatureFlag.test_creating_static_cohort.14
- '''
- SELECT "posthog_team"."id",
- "posthog_team"."uuid",
- "posthog_team"."organization_id",
- "posthog_team"."project_id",
- "posthog_team"."api_token",
- "posthog_team"."app_urls",
- "posthog_team"."name",
- "posthog_team"."slack_incoming_webhook",
- "posthog_team"."created_at",
- "posthog_team"."updated_at",
- "posthog_team"."anonymize_ips",
- "posthog_team"."completed_snippet_onboarding",
- "posthog_team"."has_completed_onboarding_for",
- "posthog_team"."ingested_event",
- "posthog_team"."autocapture_opt_out",
- "posthog_team"."autocapture_web_vitals_opt_in",
- "posthog_team"."autocapture_web_vitals_allowed_metrics",
- "posthog_team"."autocapture_exceptions_opt_in",
- "posthog_team"."autocapture_exceptions_errors_to_ignore",
- "posthog_team"."person_processing_opt_out",
- "posthog_team"."session_recording_opt_in",
- "posthog_team"."session_recording_sample_rate",
- "posthog_team"."session_recording_minimum_duration_milliseconds",
- "posthog_team"."session_recording_linked_flag",
- "posthog_team"."session_recording_network_payload_capture_config",
- "posthog_team"."session_recording_url_trigger_config",
- "posthog_team"."session_recording_url_blocklist_config",
- "posthog_team"."session_recording_event_trigger_config",
- "posthog_team"."session_replay_config",
- "posthog_team"."survey_config",
- "posthog_team"."capture_console_log_opt_in",
- "posthog_team"."capture_performance_opt_in",
- "posthog_team"."capture_dead_clicks",
- "posthog_team"."surveys_opt_in",
- "posthog_team"."heatmaps_opt_in",
- "posthog_team"."session_recording_version",
- "posthog_team"."signup_token",
- "posthog_team"."is_demo",
- "posthog_team"."access_control",
- "posthog_team"."week_start_day",
- "posthog_team"."inject_web_apps",
- "posthog_team"."test_account_filters",
- "posthog_team"."test_account_filters_default_checked",
- "posthog_team"."path_cleaning_filters",
- "posthog_team"."timezone",
- "posthog_team"."data_attributes",
- "posthog_team"."person_display_name_properties",
- "posthog_team"."live_events_columns",
- "posthog_team"."recording_domains",
- "posthog_team"."primary_dashboard_id",
- "posthog_team"."extra_settings",
- "posthog_team"."modifiers",
- "posthog_team"."correlation_config",
- "posthog_team"."session_recording_retention_period_days",
- "posthog_team"."plugins_opt_in",
- "posthog_team"."opt_out_capture",
- "posthog_team"."event_names",
- "posthog_team"."event_names_with_usage",
- "posthog_team"."event_properties",
- "posthog_team"."event_properties_with_usage",
- "posthog_team"."event_properties_numerical",
- "posthog_team"."external_data_workspace_id",
- "posthog_team"."external_data_workspace_last_synced_at"
- FROM "posthog_team"
- WHERE "posthog_team"."id" = 99999
- LIMIT 21
- '''
-# ---
# name: TestFeatureFlag.test_creating_static_cohort.15
'''
SELECT "posthog_team"."id",
@@ -2141,6 +2074,69 @@
'''
# ---
# name: TestFeatureFlag.test_creating_static_cohort.8
+ '''
+ SELECT "posthog_team"."id",
+ "posthog_team"."uuid",
+ "posthog_team"."organization_id",
+ "posthog_team"."project_id",
+ "posthog_team"."api_token",
+ "posthog_team"."app_urls",
+ "posthog_team"."name",
+ "posthog_team"."slack_incoming_webhook",
+ "posthog_team"."created_at",
+ "posthog_team"."updated_at",
+ "posthog_team"."anonymize_ips",
+ "posthog_team"."completed_snippet_onboarding",
+ "posthog_team"."has_completed_onboarding_for",
+ "posthog_team"."ingested_event",
+ "posthog_team"."autocapture_opt_out",
+ "posthog_team"."autocapture_web_vitals_opt_in",
+ "posthog_team"."autocapture_web_vitals_allowed_metrics",
+ "posthog_team"."autocapture_exceptions_opt_in",
+ "posthog_team"."autocapture_exceptions_errors_to_ignore",
+ "posthog_team"."person_processing_opt_out",
+ "posthog_team"."session_recording_opt_in",
+ "posthog_team"."session_recording_sample_rate",
+ "posthog_team"."session_recording_minimum_duration_milliseconds",
+ "posthog_team"."session_recording_linked_flag",
+ "posthog_team"."session_recording_network_payload_capture_config",
+ "posthog_team"."session_recording_url_trigger_config",
+ "posthog_team"."session_recording_url_blocklist_config",
+ "posthog_team"."session_recording_event_trigger_config",
+ "posthog_team"."session_replay_config",
+ "posthog_team"."survey_config",
+ "posthog_team"."capture_console_log_opt_in",
+ "posthog_team"."capture_performance_opt_in",
+ "posthog_team"."capture_dead_clicks",
+ "posthog_team"."surveys_opt_in",
+ "posthog_team"."heatmaps_opt_in",
+ "posthog_team"."session_recording_version",
+ "posthog_team"."signup_token",
+ "posthog_team"."is_demo",
+ "posthog_team"."access_control",
+ "posthog_team"."week_start_day",
+ "posthog_team"."inject_web_apps",
+ "posthog_team"."test_account_filters",
+ "posthog_team"."test_account_filters_default_checked",
+ "posthog_team"."path_cleaning_filters",
+ "posthog_team"."timezone",
+ "posthog_team"."data_attributes",
+ "posthog_team"."person_display_name_properties",
+ "posthog_team"."live_events_columns",
+ "posthog_team"."recording_domains",
+ "posthog_team"."primary_dashboard_id",
+ "posthog_team"."extra_settings",
+ "posthog_team"."modifiers",
+ "posthog_team"."correlation_config",
+ "posthog_team"."session_recording_retention_period_days",
+ "posthog_team"."external_data_workspace_id",
+ "posthog_team"."external_data_workspace_last_synced_at"
+ FROM "posthog_team"
+ WHERE "posthog_team"."id" = 99999
+ LIMIT 21
+ '''
+# ---
+# name: TestFeatureFlag.test_creating_static_cohort.9
'''
SELECT "posthog_featureflag"."id",
"posthog_featureflag"."key",
@@ -2158,34 +2154,9 @@
"posthog_featureflag"."usage_dashboard_id",
"posthog_featureflag"."has_enriched_analytics"
FROM "posthog_featureflag"
+ INNER JOIN "posthog_team" ON ("posthog_featureflag"."team_id" = "posthog_team"."id")
WHERE ("posthog_featureflag"."key" = 'some-feature'
- AND "posthog_featureflag"."team_id" = 99999)
- LIMIT 21
- '''
-# ---
-# name: TestFeatureFlag.test_creating_static_cohort.9
- '''
- SELECT "posthog_cohort"."id",
- "posthog_cohort"."name",
- "posthog_cohort"."description",
- "posthog_cohort"."team_id",
- "posthog_cohort"."deleted",
- "posthog_cohort"."filters",
- "posthog_cohort"."query",
- "posthog_cohort"."version",
- "posthog_cohort"."pending_version",
- "posthog_cohort"."count",
- "posthog_cohort"."created_by_id",
- "posthog_cohort"."created_at",
- "posthog_cohort"."is_calculating",
- "posthog_cohort"."last_calculation",
- "posthog_cohort"."errors_calculating",
- "posthog_cohort"."last_error_at",
- "posthog_cohort"."is_static",
- "posthog_cohort"."groups"
- FROM "posthog_cohort"
- WHERE ("posthog_cohort"."id" = 99999
- AND "posthog_cohort"."team_id" = 99999)
+ AND "posthog_team"."project_id" = 99999)
LIMIT 21
'''
# ---
diff --git a/posthog/api/test/test_cohort.py b/posthog/api/test/test_cohort.py
index 5e16d6b7bc519..8a32a8b9a5dd0 100644
--- a/posthog/api/test/test_cohort.py
+++ b/posthog/api/test/test_cohort.py
@@ -1111,7 +1111,7 @@ def _calc(query: str) -> int:
self.assertEqual(1, _calc("select 1 from events"))
# raises on all other cases
- response = self.client.post(
+ query_post_response = self.client.post(
f"/api/projects/{self.team.id}/cohorts",
data={
"name": "cohort A",
@@ -1122,7 +1122,15 @@ def _calc(query: str) -> int:
},
},
)
- self.assertEqual(response.status_code, 500, response.content)
+ query_get_response = self.client.get(
+ f"/api/projects/{self.team.id}/cohorts/{query_post_response.json()['id']}/"
+ )
+
+ self.assertEqual(query_post_response.status_code, 201)
+ self.assertEqual(query_get_response.status_code, 200)
+ self.assertEqual(
+ query_get_response.json()["errors_calculating"], 1
+        )  # Should be 1, because selecting from groups is not allowed
@patch("posthog.api.cohort.report_user_action")
def test_cohort_with_is_set_filter_missing_value(self, patch_capture):
diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py
index 86cee8950dfdf..7ffb834c41e4a 100644
--- a/posthog/api/test/test_feature_flag.py
+++ b/posthog/api/test/test_feature_flag.py
@@ -1259,7 +1259,7 @@ def test_my_flags_is_not_nplus1(self) -> None:
format="json",
).json()
- with self.assertNumQueries(FuzzyInt(7, 8)):
+ with self.assertNumQueries(FuzzyInt(8, 9)):
response = self.client.get(f"/api/projects/{self.team.id}/feature_flags/my_flags")
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -2229,7 +2229,7 @@ def test_local_evaluation_for_invalid_cohorts(self, mock_capture):
self.client.logout()
- with self.assertNumQueries(16):
+ with self.assertNumQueries(18):
# 1. SAVEPOINT
# 2. SELECT "posthog_personalapikey"."id",
# 3. RELEASE SAVEPOINT
@@ -2242,10 +2242,12 @@ def test_local_evaluation_for_invalid_cohorts(self, mock_capture):
# 10. SELECT "posthog_organizationmembership"."id",
# 11. SELECT "posthog_cohort"."id" -- all cohorts
# 12. SELECT "posthog_featureflag"."id", "posthog_featureflag"."key", -- all flags
- # 13. SELECT "posthog_cohort". id = 99999
- # 14. SELECT "posthog_cohort". id = deleted cohort
- # 15. SELECT "posthog_cohort". id = cohort from other team
- # 16. SELECT "posthog_grouptypemapping"."id", -- group type mapping
+ # 13. SELECT "posthog_team"."id", "posthog_team"."uuid",
+ # 14. SELECT "posthog_cohort". id = 99999
+ # 15. SELECT "posthog_team"."id", "posthog_team"."uuid",
+ # 16. SELECT "posthog_cohort". id = deleted cohort
+ # 17. SELECT "posthog_cohort". id = cohort from other team
+ # 18. SELECT "posthog_grouptypemapping"."id", -- group type mapping
response = self.client.get(
f"/api/feature_flag/local_evaluation?token={self.team.api_token}&send_cohorts",
@@ -4230,7 +4232,7 @@ def test_creating_static_cohort_with_deleted_flag(self):
name="some cohort",
)
- with self.assertNumQueries(1):
+ with self.assertNumQueries(2):
get_cohort_actors_for_feature_flag(cohort.pk, "some-feature", self.team.pk)
cohort.refresh_from_db()
@@ -4268,7 +4270,7 @@ def test_creating_static_cohort_with_inactive_flag(self):
name="some cohort",
)
- with self.assertNumQueries(1):
+ with self.assertNumQueries(2):
get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk)
cohort.refresh_from_db()
@@ -4307,7 +4309,7 @@ def test_creating_static_cohort_with_group_flag(self):
name="some cohort",
)
- with self.assertNumQueries(1):
+ with self.assertNumQueries(2):
get_cohort_actors_for_feature_flag(cohort.pk, "some-feature3", self.team.pk)
cohort.refresh_from_db()
@@ -4339,7 +4341,7 @@ def test_creating_static_cohort_with_no_person_distinct_ids(self):
name="some cohort",
)
- with self.assertNumQueries(5):
+ with self.assertNumQueries(6):
get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk)
cohort.refresh_from_db()
@@ -4357,7 +4359,7 @@ def test_creating_static_cohort_with_non_existing_flag(self):
name="some cohort",
)
- with self.assertNumQueries(1):
+ with self.assertNumQueries(2):
get_cohort_actors_for_feature_flag(cohort.pk, "some-feature2", self.team.pk)
cohort.refresh_from_db()
diff --git a/posthog/api/test/test_organization.py b/posthog/api/test/test_organization.py
index 2396f78e3c557..143fbe3f524b9 100644
--- a/posthog/api/test/test_organization.py
+++ b/posthog/api/test/test_organization.py
@@ -1,4 +1,5 @@
from rest_framework import status
+from unittest.mock import patch, ANY
from posthog.models import Organization, OrganizationMembership, Team
from posthog.models.personal_api_key import PersonalAPIKey, hash_key_value
@@ -128,7 +129,8 @@ def test_cant_update_plugins_access_level(self):
self.organization.refresh_from_db()
self.assertEqual(self.organization.plugins_access_level, 3)
- def test_enforce_2fa_for_everyone(self):
+ @patch("posthoganalytics.capture")
+ def test_enforce_2fa_for_everyone(self, mock_capture):
# Only admins should be able to enforce 2fa
response = self.client.patch(f"/api/organizations/{self.organization.id}/", {"enforce_2fa": True})
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@@ -142,6 +144,19 @@ def test_enforce_2fa_for_everyone(self):
self.organization.refresh_from_db()
self.assertEqual(self.organization.enforce_2fa, True)
+ # Verify the capture event was called correctly
+ mock_capture.assert_any_call(
+ self.user.distinct_id,
+ "organization 2fa enforcement toggled",
+ properties={
+ "enabled": True,
+ "organization_id": str(self.organization.id),
+ "organization_name": self.organization.name,
+ "user_role": OrganizationMembership.Level.ADMIN,
+ },
+ groups={"instance": ANY, "organization": str(self.organization.id)},
+ )
+
def test_projects_outside_personal_api_key_scoped_organizations_not_listed(self):
other_org, _, _ = Organization.objects.bootstrap(self.user)
personal_api_key = generate_random_token_personal()
diff --git a/posthog/api/test/test_person.py b/posthog/api/test/test_person.py
index 2c9694f6eda6d..29eb3990407d5 100644
--- a/posthog/api/test/test_person.py
+++ b/posthog/api/test/test_person.py
@@ -873,7 +873,7 @@ def test_pagination_limit(self):
create_person(team_id=self.team.pk, version=0)
returned_ids = []
- with self.assertNumQueries(10):
+ with self.assertNumQueries(9):
response = self.client.get("/api/person/?limit=10").json()
self.assertEqual(len(response["results"]), 9)
returned_ids += [x["distinct_ids"][0] for x in response["results"]]
diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py
index 0040ddd257e2d..0e40b6a595d36 100644
--- a/posthog/api/test/test_team.py
+++ b/posthog/api/test/test_team.py
@@ -16,6 +16,7 @@
from posthog.models import ActivityLog, EarlyAccessFeature
from posthog.models.async_deletion.async_deletion import AsyncDeletion, DeletionType
from posthog.models.dashboard import Dashboard
+from posthog.models.group_type_mapping import GroupTypeMapping
from posthog.models.instance_setting import get_instance_setting
from posthog.models.organization import Organization, OrganizationMembership
from posthog.models.personal_api_key import PersonalAPIKey, hash_key_value
@@ -87,6 +88,26 @@ def test_retrieve_team(self):
self.assertNotIn("event_names_with_usage", response_data)
self.assertNotIn("event_properties_with_usage", response_data)
+ def test_retrieve_team_has_group_types(self):
+ other_team = Team.objects.create(organization=self.organization, project=self.project)
+
+ response = self.client.get("/api/environments/@current/")
+ response_data = response.json()
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK, response_data)
+ self.assertEqual(response_data["has_group_types"], False)
+
+        # Creating a group type in the same project, but a different team
+ GroupTypeMapping.objects.create(
+ project=self.project, team=other_team, group_type="person", group_type_index=0
+ )
+
+ response = self.client.get("/api/environments/@current/")
+ response_data = response.json()
+
+ self.assertEqual(response.status_code, status.HTTP_200_OK, response_data)
+        self.assertEqual(response_data["has_group_types"], True)  # Irrelevant that the group type has a different `team`
+
def test_cant_retrieve_team_from_another_org(self):
org = Organization.objects.create(name="New Org")
team = Team.objects.create(organization=org, name="Default project")
diff --git a/posthog/clickhouse/cluster.py b/posthog/clickhouse/cluster.py
index 3aa67c94ff3b5..75c91db9da75f 100644
--- a/posthog/clickhouse/cluster.py
+++ b/posthog/clickhouse/cluster.py
@@ -52,7 +52,9 @@ def result(
class ConnectionInfo(NamedTuple):
address: str
- port: int
+
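+    # Build a ClickHouse client pool for this host (the explicit port is no longer part of ConnectionInfo).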
+ def make_pool(self) -> ChPool:
+ return make_ch_pool(host=self.address)
class HostInfo(NamedTuple):
@@ -67,10 +69,10 @@ class HostInfo(NamedTuple):
class ClickhouseCluster:
def __init__(self, bootstrap_client: Client, extra_hosts: Sequence[ConnectionInfo] | None = None) -> None:
self.__hosts = [
- HostInfo(ConnectionInfo(host_address, port), shard_num, replica_num)
- for (host_address, port, shard_num, replica_num) in bootstrap_client.execute(
+ HostInfo(ConnectionInfo(host_address), shard_num, replica_num)
+ for (host_address, shard_num, replica_num) in bootstrap_client.execute(
"""
- SELECT host_address, port, shard_num, replica_num
+ SELECT host_address, shard_num, replica_num
FROM system.clusters
WHERE name = %(name)s
ORDER BY shard_num, replica_num
@@ -87,7 +89,7 @@ def __init__(self, bootstrap_client: Client, extra_hosts: Sequence[ConnectionInf
def __get_task_function(self, host: HostInfo, fn: Callable[[Client], T]) -> Callable[[], T]:
pool = self.__pools.get(host)
if pool is None:
- pool = self.__pools[host] = make_ch_pool(host=host.connection_info.address, port=host.connection_info.port)
+ pool = self.__pools[host] = host.connection_info.make_pool()
def task():
with pool.get_client() as client:
diff --git a/posthog/clickhouse/materialized_columns.py b/posthog/clickhouse/materialized_columns.py
index 2ff858274ab4d..09b2d8b24c6dc 100644
--- a/posthog/clickhouse/materialized_columns.py
+++ b/posthog/clickhouse/materialized_columns.py
@@ -1,6 +1,6 @@
-from datetime import timedelta
+from typing import Protocol
-from posthog.cache_utils import cache_for
+from posthog.models.instance_setting import get_instance_setting
from posthog.models.property import PropertyName, TableColumn, TableWithProperties
from posthog.settings import EE_AVAILABLE
@@ -8,19 +8,25 @@
ColumnName = str
TablesWithMaterializedColumns = TableWithProperties
+
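+# Structural type shared by the EE and non-EE code paths: callers only rely on the column name and nullability.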
+class MaterializedColumn(Protocol):
+ name: ColumnName
+ is_nullable: bool
+
+
if EE_AVAILABLE:
- from ee.clickhouse.materialized_columns.columns import get_materialized_columns
-else:
+ from ee.clickhouse.materialized_columns.columns import get_enabled_materialized_columns
- def get_materialized_columns(
- table: TablesWithMaterializedColumns,
- exclude_disabled_columns: bool = False,
- ) -> dict[tuple[PropertyName, TableColumn], ColumnName]:
- return {}
+ def get_materialized_column_for_property(
+ table: TablesWithMaterializedColumns, table_column: TableColumn, property_name: PropertyName
+ ) -> MaterializedColumn | None:
+ if not get_instance_setting("MATERIALIZED_COLUMNS_ENABLED"):
+ return None
+ return get_enabled_materialized_columns(table).get((property_name, table_column))
+else:
-@cache_for(timedelta(minutes=15))
-def get_enabled_materialized_columns(
- table: TablesWithMaterializedColumns,
-) -> dict[tuple[PropertyName, TableColumn], ColumnName]:
- return get_materialized_columns(table, exclude_disabled_columns=True)
+ def get_materialized_column_for_property(
+ table: TablesWithMaterializedColumns, table_column: TableColumn, property_name: PropertyName
+ ) -> MaterializedColumn | None:
+ return None
diff --git a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
index 7e7847c570bac..1be2a1c033c66 100644
--- a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
+++ b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py
@@ -1,6 +1,6 @@
from infi.clickhouse_orm import migrations
-from posthog.clickhouse.materialized_columns import get_materialized_columns
+from posthog.clickhouse.materialized_columns import get_materialized_column_for_property
from posthog.client import sync_execute
from posthog.settings import CLICKHOUSE_CLUSTER
@@ -45,9 +45,9 @@ def materialize_session_and_window_id(database):
properties = ["$session_id", "$window_id"]
for property_name in properties:
- materialized_columns = get_materialized_columns("events")
+ current_materialized_column = get_materialized_column_for_property("events", "properties", property_name)
# If the column is not materialized, materialize it
- if (property_name, "properties") not in materialized_columns:
+ if current_materialized_column is None:
materialize("events", property_name, property_name)
         # Now, we need to clean up any potential inconsistencies with existing column names
@@ -71,9 +71,8 @@ def materialize_session_and_window_id(database):
# materialized the column or renamed the column, and then ran the 0004_... async migration
# before this migration runs.
possible_old_column_names = {"mat_" + property_name}
- current_materialized_column_name = materialized_columns.get((property_name, "properties"), None)
- if current_materialized_column_name is not None and current_materialized_column_name != property_name:
- possible_old_column_names.add(current_materialized_column_name)
+ if current_materialized_column is not None and current_materialized_column.name != property_name:
+ possible_old_column_names.add(current_materialized_column.name)
for possible_old_column_name in possible_old_column_names:
ensure_only_new_column_exists(database, "sharded_events", possible_old_column_name, property_name)
diff --git a/posthog/demo/matrix/manager.py b/posthog/demo/matrix/manager.py
index 0abc17f32ca08..f52a74fa6ba9a 100644
--- a/posthog/demo/matrix/manager.py
+++ b/posthog/demo/matrix/manager.py
@@ -204,7 +204,7 @@ def _erase_master_team_data(cls):
# )
# ]
# )
- GroupTypeMapping.objects.filter(team_id=cls.MASTER_TEAM_ID).delete()
+ GroupTypeMapping.objects.filter(project_id=cls.MASTER_TEAM_ID).delete()
def _copy_analytics_data_from_master_team(self, target_team: Team):
from posthog.models.event.sql import COPY_EVENTS_BETWEEN_TEAMS
@@ -222,11 +222,11 @@ def _copy_analytics_data_from_master_team(self, target_team: Team):
sync_execute(COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, copy_params)
sync_execute(COPY_EVENTS_BETWEEN_TEAMS, copy_params)
sync_execute(COPY_GROUPS_BETWEEN_TEAMS, copy_params)
- GroupTypeMapping.objects.filter(team_id=target_team.pk).delete()
+ GroupTypeMapping.objects.filter(project_id=target_team.project_id).delete()
GroupTypeMapping.objects.bulk_create(
(
- GroupTypeMapping(team=target_team, project_id=target_team.project_id, **record)
- for record in GroupTypeMapping.objects.filter(team_id=self.MASTER_TEAM_ID).values(
+ GroupTypeMapping(team_id=target_team.id, project_id=target_team.project_id, **record)
+ for record in GroupTypeMapping.objects.filter(project_id=self.MASTER_TEAM_ID).values(
"group_type", "group_type_index", "name_singular", "name_plural"
)
),
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py
index 37370800f30c3..9ca4500aa2abd 100644
--- a/posthog/hogql/database/database.py
+++ b/posthog/hogql/database/database.py
@@ -287,7 +287,7 @@ def create_hogql_database(
"$virt_initial_channel_type", modifiers.customChannelTypeRules
)
- for mapping in GroupTypeMapping.objects.filter(team=team):
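+    # Group type mappings are project-scoped, so every environment (team) within the project shares them.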
+ for mapping in GroupTypeMapping.objects.filter(project_id=team.project_id):
if database.events.fields.get(mapping.group_type) is None:
database.events.fields[mapping.group_type] = FieldTraverser(chain=[f"group_{mapping.group_type_index}"])
diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py
index 37fea932f2014..418e2f6354807 100644
--- a/posthog/hogql/printer.py
+++ b/posthog/hogql/printer.py
@@ -6,7 +6,11 @@
from typing import Literal, Optional, Union, cast
from uuid import UUID
-from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns
+from posthog.clickhouse.materialized_columns import (
+ MaterializedColumn,
+ TablesWithMaterializedColumns,
+ get_materialized_column_for_property,
+)
from posthog.clickhouse.property_groups import property_groups
from posthog.hogql import ast
from posthog.hogql.base import AST, _T_AST
@@ -197,6 +201,7 @@ class JoinExprResponse:
class PrintableMaterializedColumn:
table: Optional[str]
column: str
+ is_nullable: bool
def __str__(self) -> str:
if self.table is None:
@@ -1321,10 +1326,11 @@ def __get_all_materialized_property_sources(
field_name = cast(Union[Literal["properties"], Literal["person_properties"]], field.name)
materialized_column = self._get_materialized_column(table_name, property_name, field_name)
- if materialized_column:
+ if materialized_column is not None:
yield PrintableMaterializedColumn(
self.visit(field_type.table_type),
- self._print_identifier(materialized_column),
+ self._print_identifier(materialized_column.name),
+ is_nullable=materialized_column.is_nullable,
)
if self.context.modifiers.propertyGroupsMode in (
@@ -1352,8 +1358,12 @@ def __get_all_materialized_property_sources(
materialized_column = self._get_materialized_column("events", property_name, "person_properties")
else:
materialized_column = self._get_materialized_column("person", property_name, "properties")
- if materialized_column:
- yield PrintableMaterializedColumn(None, self._print_identifier(materialized_column))
+ if materialized_column is not None:
+ yield PrintableMaterializedColumn(
+ None,
+ self._print_identifier(materialized_column.name),
+ is_nullable=materialized_column.is_nullable,
+ )
def visit_property_type(self, type: ast.PropertyType):
if type.joined_subquery is not None and type.joined_subquery_field_name is not None:
@@ -1361,7 +1371,10 @@ def visit_property_type(self, type: ast.PropertyType):
materialized_property_source = self.__get_materialized_property_source_for_property_type(type)
if materialized_property_source is not None:
- if isinstance(materialized_property_source, PrintableMaterializedColumn):
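+            # Nullable materialized columns are read directly; the legacy nullIf('') wrapping only applies to non-nullable ones.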
+ if (
+ isinstance(materialized_property_source, PrintableMaterializedColumn)
+ and not materialized_property_source.is_nullable
+ ):
# TODO: rematerialize all columns to properly support empty strings and "null" string values.
if self.context.modifiers.materializationMode == MaterializationMode.LEGACY_NULL_AS_STRING:
materialized_property_sql = f"nullIf({materialized_property_source}, '')"
@@ -1511,9 +1524,10 @@ def _unsafe_json_extract_trim_quotes(self, unsafe_field: str, unsafe_args: list[
def _get_materialized_column(
self, table_name: str, property_name: PropertyName, field_name: TableColumn
- ) -> Optional[str]:
- materialized_columns = get_enabled_materialized_columns(cast(TablesWithMaterializedColumns, table_name))
- return materialized_columns.get((property_name, field_name), None)
+ ) -> MaterializedColumn | None:
+ return get_materialized_column_for_property(
+ cast(TablesWithMaterializedColumns, table_name), field_name, property_name
+ )
def _get_timezone(self) -> str:
return self.context.database.get_timezone() if self.context.database else "UTC"
diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py
index 8d7dad46040ac..4f2422263d0c8 100644
--- a/posthog/hogql/test/test_printer.py
+++ b/posthog/hogql/test/test_printer.py
@@ -460,14 +460,22 @@ def test_hogql_properties_materialized_json_access(self):
self.assertEqual(1 + 2, 3)
return
- materialize("events", "withmat")
context = HogQLContext(team_id=self.team.pk)
+ materialize("events", "withmat")
self.assertEqual(
self._expr("properties.withmat.json.yet", context),
"replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(nullIf(nullIf(events.mat_withmat, ''), 'null'), %(hogql_val_0)s, %(hogql_val_1)s), ''), 'null'), '^\"|\"$', '')",
)
self.assertEqual(context.values, {"hogql_val_0": "json", "hogql_val_1": "yet"})
+ context = HogQLContext(team_id=self.team.pk)
+ materialize("events", "withmat_nullable", is_nullable=True)
+ self.assertEqual(
+ self._expr("properties.withmat_nullable.json.yet", context),
+ "replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.mat_withmat_nullable, %(hogql_val_0)s, %(hogql_val_1)s), ''), 'null'), '^\"|\"$', '')",
+ )
+ self.assertEqual(context.values, {"hogql_val_0": "json", "hogql_val_1": "yet"})
+
def test_materialized_fields_and_properties(self):
try:
from ee.clickhouse.materialized_columns.analyze import materialize
@@ -499,6 +507,12 @@ def test_materialized_fields_and_properties(self):
"nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')",
)
+ materialize("events", "nullable_property", is_nullable=True)
+ self.assertEqual(
+ self._expr("properties['nullable_property']"),
+ "events.mat_nullable_property",
+ )
+
def test_property_groups(self):
context = HogQLContext(
team_id=self.team.pk,
diff --git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py
index 6dbac74590da6..e561607629f1f 100644
--- a/posthog/hogql/transforms/property_types.py
+++ b/posthog/hogql/transforms/property_types.py
@@ -1,6 +1,10 @@
-from typing import Literal, Optional, cast
+from typing import Literal, cast
-from posthog.clickhouse.materialized_columns import TablesWithMaterializedColumns, get_enabled_materialized_columns
+from posthog.clickhouse.materialized_columns import (
+ MaterializedColumn,
+ TablesWithMaterializedColumns,
+ get_materialized_column_for_property,
+)
from posthog.hogql import ast
from posthog.hogql.context import HogQLContext
from posthog.hogql.database.models import (
@@ -258,7 +262,7 @@ def _add_property_notice(
message = f"{property_type.capitalize()} property '{property_name}' is of type '{field_type}'."
if self.context.debug:
- if materialized_column:
+ if materialized_column is not None:
message += " This property is materialized ⚡️."
else:
message += " This property is not materialized 🐢."
@@ -277,6 +281,7 @@ def _add_notice(self, node: ast.Field, message: str):
def _get_materialized_column(
self, table_name: str, property_name: PropertyName, field_name: TableColumn
- ) -> Optional[str]:
- materialized_columns = get_enabled_materialized_columns(cast(TablesWithMaterializedColumns, table_name))
- return materialized_columns.get((property_name, field_name), None)
+ ) -> MaterializedColumn | None:
+ return get_materialized_column_for_property(
+ cast(TablesWithMaterializedColumns, table_name), field_name, property_name
+ )
diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py
index cec4b7019f212..cde5bd2d6311f 100644
--- a/posthog/hogql_queries/actors_query_runner.py
+++ b/posthog/hogql_queries/actors_query_runner.py
@@ -3,7 +3,7 @@
from collections.abc import Sequence, Iterator
from posthog.hogql import ast
-from posthog.hogql.constants import HogQLGlobalSettings
+from posthog.hogql.constants import HogQLGlobalSettings, HogQLQuerySettings
from posthog.hogql.parser import parse_expr, parse_order_expr
from posthog.hogql.property import has_aggregation
from posthog.hogql.resolver_utils import extract_select_queries
@@ -307,6 +307,7 @@ def to_query(self) -> ast.SelectQuery:
having=having,
group_by=group_by if has_any_aggregation else None,
order_by=order_by,
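+            # Top-level query settings; rendered in SQL as SETTINGS optimize_aggregation_in_order=1, join_algorithm='auto'.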
+ settings=HogQLQuerySettings(join_algorithm="auto", optimize_aggregation_in_order=True),
)
def to_actors_query(self) -> ast.SelectQuery:
diff --git a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
index 98599790f3865..3f3127fcb1b55 100644
--- a/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
+++ b/posthog/hogql_queries/experiments/test/test_experiment_funnels_query_runner.py
@@ -16,6 +16,7 @@
from posthog.constants import ExperimentNoResultsErrorKeys
import json
from posthog.test.test_journeys import journeys_for
+from flaky import flaky
class TestExperimentFunnelsQueryRunner(ClickhouseTestMixin, APIBaseTest):
@@ -128,6 +129,7 @@ def test_query_runner(self):
self.assertIn("control", result.credible_intervals)
self.assertIn("test", result.credible_intervals)
+ @flaky(max_runs=10, min_passes=1)
@freeze_time("2020-01-01T12:00:00Z")
def test_query_runner_standard_flow(self):
feature_flag = self.create_feature_flag()
diff --git a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
index 8837bfeab8607..4402afde55eec 100644
--- a/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
+++ b/posthog/hogql_queries/experiments/test/test_experiment_trends_query_runner.py
@@ -29,6 +29,7 @@
from pyarrow import parquet as pq
import pyarrow as pa
import json
+from flaky import flaky
from boto3 import resource
from botocore.config import Config
@@ -650,6 +651,7 @@ def test_query_runner_with_avg_math(self):
prepared_count_query = query_runner.prepared_count_query
self.assertEqual(prepared_count_query.series[0].math, "sum")
+ @flaky(max_runs=10, min_passes=1)
@freeze_time("2020-01-01T12:00:00Z")
def test_query_runner_standard_flow(self):
feature_flag = self.create_feature_flag()
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
index 2315f2b51ebf6..2f2933fb62433 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel.ambr
@@ -193,7 +193,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -612,7 +614,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -730,7 +734,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -848,7 +854,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1848,7 +1856,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1989,7 +1999,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2130,7 +2142,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2271,7 +2285,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr
index f95e83e21b1d9..4573056cf6cac 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation.ambr
@@ -482,7 +482,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -673,7 +675,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -864,7 +868,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1055,7 +1061,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1408,7 +1416,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1599,7 +1609,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1790,7 +1802,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1981,7 +1995,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2520,7 +2536,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2635,7 +2653,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2750,7 +2770,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2865,7 +2887,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3128,7 +3152,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3243,7 +3269,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3490,7 +3518,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3605,7 +3635,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3720,7 +3752,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3835,7 +3869,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4098,7 +4134,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4213,7 +4251,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4482,7 +4522,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4604,7 +4646,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4726,7 +4770,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4848,7 +4894,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5264,7 +5312,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5386,7 +5436,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5508,7 +5560,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5630,7 +5684,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6046,7 +6102,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6168,7 +6226,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6290,7 +6350,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6412,7 +6474,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6828,7 +6892,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -6950,7 +7016,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7072,7 +7140,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7194,7 +7264,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7610,7 +7682,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7732,7 +7806,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7854,7 +7930,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -7976,7 +8054,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr
index ca6d26d135828..ea2c02c121f49 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors.ambr
@@ -163,7 +163,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -428,7 +430,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -619,7 +623,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
index dcec437b05683..f1f604cc85b02 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_actors_udf.ambr
@@ -107,7 +107,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -239,7 +241,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -374,7 +378,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr
index 0912fa7845d36..71680063ab927 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_correlation_udf.ambr
@@ -358,7 +358,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -493,7 +495,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -628,7 +632,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -763,7 +769,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1026,7 +1034,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1161,7 +1171,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1296,7 +1308,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1431,7 +1445,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1820,7 +1836,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1887,7 +1905,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1954,7 +1974,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2021,7 +2043,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2202,7 +2226,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2269,7 +2295,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2434,7 +2462,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2501,7 +2531,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2568,7 +2600,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2635,7 +2669,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2816,7 +2852,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2883,7 +2921,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3070,7 +3110,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3144,7 +3186,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3218,7 +3262,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3292,7 +3338,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3592,7 +3640,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3666,7 +3716,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3740,7 +3792,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3814,7 +3868,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4114,7 +4170,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4188,7 +4246,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4262,7 +4322,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4336,7 +4398,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4636,7 +4700,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4710,7 +4776,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4784,7 +4852,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4858,7 +4928,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5158,7 +5230,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5232,7 +5306,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5306,7 +5382,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5380,7 +5458,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr
index f623ea36204cd..d2d6bbab5f69f 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons.ambr
@@ -162,7 +162,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -349,7 +351,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -536,7 +540,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr
index 9fbd6af6c74ed..38542d31104b9 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_persons_udf.ambr
@@ -53,7 +53,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -131,7 +133,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -209,7 +213,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
index 163abb23ac305..d06597a0b35da 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict.ambr
@@ -754,7 +754,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -872,7 +874,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -990,7 +994,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1108,7 +1114,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
index f2e6752d368d0..651f296097a7b 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
@@ -122,7 +122,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -269,7 +271,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -416,7 +420,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
index 846e534decf6a..3a0a96ffa7162 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_persons_udf.ambr
@@ -53,7 +53,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -131,7 +133,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -209,7 +213,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
index f0bbdae5329d3..21eb841990a53 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_strict_udf.ambr
@@ -554,7 +554,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -637,7 +639,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -720,7 +724,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -803,7 +809,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr
index e735153b628c3..5eafb6901598c 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors.ambr
@@ -148,7 +148,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -321,7 +323,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -494,7 +498,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr
index 6e86eda210324..36d25b420b5ee 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_trends_actors_udf.ambr
@@ -51,7 +51,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -127,7 +129,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -203,7 +207,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr
index cb6d8db14b8ce..545e7fa4d506c 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_udf.ambr
@@ -115,7 +115,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -419,7 +421,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -491,7 +495,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -563,7 +569,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.created_at DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1258,7 +1266,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1341,7 +1351,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1424,7 +1436,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1507,7 +1521,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr
index 3f5e9e4467e64..c52ab6eb60b8d 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered.ambr
@@ -1471,7 +1471,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2310,7 +2312,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2572,7 +2576,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2834,7 +2840,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
index a4bfbc566ff43..4315f4b9bba92 100644
--- a/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
+++ b/posthog/hogql_queries/insights/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
@@ -266,7 +266,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr
index b8db8df7c3613..b2a235c2308e0 100644
--- a/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr
+++ b/posthog/hogql_queries/insights/test/__snapshots__/test_insight_actors_query_runner.ambr
@@ -66,7 +66,8 @@
WHERE equals(person.team_id, 99999)
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0)))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
- ORDER BY persons.properties___name ASC)
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -154,7 +155,8 @@
and isNull(toStartOfDay(parseDateTime64BestEffortOrNull('2020-01-12', 6, 'US/Pacific')))), ifNull(equals(status, 'returning'), 0))) AS source)))
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
- ORDER BY persons.properties___name ASC)
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -192,7 +194,8 @@
WHERE equals(groups.team_id, 99999)
GROUP BY groups.group_type_index,
groups.group_key) AS groups ON equals(groups.key, source.group_key)
- ORDER BY groups.properties___name ASC)
+ ORDER BY groups.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -258,7 +261,8 @@
WHERE ifNull(equals(num_intervals, 2), 0)) AS source)))
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
- ORDER BY persons.properties___name ASC)
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -291,7 +295,8 @@
WHERE equals(groups.team_id, 99999)
GROUP BY groups.group_type_index,
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
- ORDER BY groups.properties___name ASC)
+ ORDER BY groups.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -357,7 +362,8 @@
GROUP BY actor_id) AS source)))
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
- ORDER BY persons.properties___name ASC)
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
@@ -432,7 +438,8 @@
GROUP BY actor_id) AS source)))
GROUP BY person.id
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'US/Pacific'), person.version), plus(now64(6, 'US/Pacific'), toIntervalDay(1))), 0))))) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
- ORDER BY persons.properties___name ASC)
+ ORDER BY persons.properties___name ASC SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto')
LIMIT 100 SETTINGS readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr
index 20f2012034bba..905fb297fe4a9 100644
--- a/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr
+++ b/posthog/hogql_queries/insights/test/__snapshots__/test_paths_query_runner_ee.ambr
@@ -1301,7 +1301,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1448,7 +1450,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1595,7 +1599,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1744,7 +1750,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1891,7 +1899,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2038,7 +2048,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2187,7 +2199,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2334,7 +2348,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2481,7 +2497,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2723,7 +2741,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -2890,7 +2910,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3043,7 +3065,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3208,7 +3232,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3383,7 +3409,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3723,7 +3751,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -3967,7 +3997,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4211,7 +4243,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4455,7 +4489,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4849,7 +4885,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -4996,7 +5034,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5225,7 +5265,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5454,7 +5496,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5601,7 +5645,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -5748,7 +5794,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13155,7 +13203,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13302,7 +13352,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13449,7 +13501,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13598,7 +13652,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13745,7 +13801,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -13892,7 +13950,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14041,7 +14101,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14188,7 +14250,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14335,7 +14399,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14577,7 +14643,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14744,7 +14812,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -14897,7 +14967,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -15062,7 +15134,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -15237,7 +15311,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -15577,7 +15653,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -15821,7 +15899,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -16065,7 +16145,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -16309,7 +16391,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -16703,7 +16787,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -16850,7 +16936,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -17079,7 +17167,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -17308,7 +17398,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -17455,7 +17547,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -17602,7 +17696,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY persons.id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr
index d726c177939a3..477e07b03d8f9 100644
--- a/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr
+++ b/posthog/hogql_queries/insights/test/__snapshots__/test_retention_query_runner.ambr
@@ -63,7 +63,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY length(source.appearances) DESC, source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -164,7 +166,9 @@
groups.group_key) AS groups ON equals(groups.key, source.actor_id)
ORDER BY length(source.appearances) DESC, source.actor_id ASC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
index 4b096b060262e..9bd3a90b8d559 100644
--- a/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
+++ b/posthog/hogql_queries/insights/trends/test/__snapshots__/test_trends.ambr
@@ -42,11 +42,14 @@
# name: TestTrends.test_action_filtering_with_cohort.4
'''
/* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 2
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestTrends.test_action_filtering_with_cohort.5
@@ -138,11 +141,14 @@
# name: TestTrends.test_action_filtering_with_cohort_poe_v2.4
'''
/* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 2
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestTrends.test_action_filtering_with_cohort_poe_v2.5
@@ -278,7 +284,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY source.event_count DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
@@ -1422,7 +1430,9 @@
HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, 'UTC'), person.version), plus(now64(6, 'UTC'), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons ON equals(persons.id, source.actor_id)
ORDER BY source.event_count DESC
LIMIT 101
- OFFSET 0 SETTINGS readonly=2,
+ OFFSET 0 SETTINGS optimize_aggregation_in_order=1,
+ join_algorithm='auto',
+ readonly=2,
max_execution_time=60,
allow_experimental_object_type=1,
format_csv_allow_double_quotes=0,
diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py
index 668cd8b2afb48..c7de458195b2f 100644
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py
+++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py
@@ -219,7 +219,10 @@ def to_actors_query_options(self) -> InsightActorsQueryOptionsResponse:
for value in self.query.breakdownFilter.breakdown:
if value != "all" and str(value) != "0":
res_breakdown.append(
- BreakdownItem(label=Cohort.objects.get(pk=int(value), team=self.team).name, value=value)
+ BreakdownItem(
+ label=Cohort.objects.get(pk=int(value), team__project_id=self.team.project_id).name,
+ value=value,
+ )
)
else:
res_breakdown.append(BreakdownItem(label="all users", value="all"))
diff --git a/posthog/hogql_queries/test/test_actors_query_runner.py b/posthog/hogql_queries/test/test_actors_query_runner.py
index 904c1adad8d9f..36a12166cb589 100644
--- a/posthog/hogql_queries/test/test_actors_query_runner.py
+++ b/posthog/hogql_queries/test/test_actors_query_runner.py
@@ -1,3 +1,5 @@
+from typing import cast
+
import pytest
from posthog.hogql import ast
@@ -66,7 +68,7 @@ def test_default_persons_query(self):
runner = self._create_runner(ActorsQuery())
query = runner.to_query()
- query = clear_locations(query)
+ query = cast(ast.SelectQuery, clear_locations(query))
expected = ast.SelectQuery(
select=[
ast.Field(chain=["id"]),
@@ -78,7 +80,8 @@ def test_default_persons_query(self):
where=None,
order_by=[ast.OrderExpr(expr=ast.Field(chain=["created_at"]), order="DESC")],
)
- assert clear_locations(query) == expected
+ query.settings = None
+ assert query == expected
response = runner.calculate()
assert len(response.results) == 10
diff --git a/posthog/management/commands/generate_demo_data.py b/posthog/management/commands/generate_demo_data.py
index ce094620453a1..dae5cca8ffa73 100644
--- a/posthog/management/commands/generate_demo_data.py
+++ b/posthog/management/commands/generate_demo_data.py
@@ -2,6 +2,7 @@
import logging
import secrets
from time import monotonic
+from typing import Optional
from django.core import exceptions
from django.core.management.base import BaseCommand
@@ -67,13 +68,13 @@ def handle(self, *args, **options):
seed = options.get("seed") or secrets.token_hex(16)
now = options.get("now") or dt.datetime.now(dt.UTC)
existing_team_id = options.get("team_id")
- if (
- existing_team_id is not None
- and existing_team_id != 0
- and not Team.objects.filter(pk=existing_team_id).exists()
- ):
- print(f"Team with ID {options['team_id']} does not exist!")
- return
+ existing_team: Optional[Team] = None
+ if existing_team_id is not None and existing_team_id != 0:
+ try:
+ existing_team = Team.objects.get(pk=existing_team_id)
+ except Team.DoesNotExist:
+ print(f"Team with ID {options['team_id']} does not exist!")
+ return
print("Instantiating the Matrix...")
matrix = HedgeboxMatrix(
seed,
@@ -81,8 +82,8 @@ def handle(self, *args, **options):
days_past=options["days_past"],
days_future=options["days_future"],
n_clusters=options["n_clusters"],
- group_type_index_offset=GroupTypeMapping.objects.filter(team_id=existing_team_id).count()
- if existing_team_id
+ group_type_index_offset=GroupTypeMapping.objects.filter(project_id=existing_team.project_id).count()
+ if existing_team
else 0,
)
print("Running simulation...")
diff --git a/posthog/models/cohort/cohort.py b/posthog/models/cohort/cohort.py
index 1ab980bfc6796..f658bbd07e6de 100644
--- a/posthog/models/cohort/cohort.py
+++ b/posthog/models/cohort/cohort.py
@@ -250,11 +250,16 @@ def calculate_people_ch(self, pending_version: int, *, initiating_user_id: Optio
clear_stale_cohort.delay(self.pk, before_version=pending_version)
- def insert_users_by_list(self, items: list[str]) -> None:
- """
- Items is a list of distinct_ids
+ def insert_users_by_list(self, items: list[str], *, team_id: Optional[int] = None) -> None:
"""
+ Insert a list of users identified by their distinct ID into the cohort, for the given team.
+ Args:
+ items: List of distinct IDs of users to be inserted into the cohort.
+ team_id: ID of the team for which to insert the users. Defaults to `self.team_id`, because a lot of existing usage in tests relies on that.
+ """
+ if team_id is None:
+ team_id = self.team_id
batchsize = 1000
from posthog.models.cohort.util import (
insert_static_cohort,
@@ -272,10 +277,10 @@ def insert_users_by_list(self, items: list[str]) -> None:
for i in range(0, len(items), batchsize):
batch = items[i : i + batchsize]
persons_query = (
- Person.objects.filter(team_id=self.team_id)
+ Person.objects.filter(team_id=team_id)
.filter(
Q(
- persondistinctid__team_id=self.team_id,
+ persondistinctid__team_id=team_id,
persondistinctid__distinct_id__in=batch,
)
)
@@ -284,7 +289,7 @@ def insert_users_by_list(self, items: list[str]) -> None:
insert_static_cohort(
list(persons_query.values_list("uuid", flat=True)),
self.pk,
- self.team,
+ team_id=team_id,
)
sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params()
query = UPDATE_QUERY.format(
@@ -297,7 +302,7 @@ def insert_users_by_list(self, items: list[str]) -> None:
)
cursor.execute(query, params)
- count = get_static_cohort_size(self)
+ count = get_static_cohort_size(cohort_id=self.id, team_id=self.team_id)
self.count = count
self.is_calculating = False
@@ -313,7 +318,18 @@ def insert_users_by_list(self, items: list[str]) -> None:
self.save()
capture_exception(err)
- def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool = False, batchsize=1000) -> None:
+ def insert_users_list_by_uuid(
+ self, items: list[str], insert_in_clickhouse: bool = False, batchsize=1000, *, team_id: int
+ ) -> None:
+ """
+ Insert a list of users identified by their UUID into the cohort, for the given team.
+
+ Args:
+ items: List of user UUIDs to be inserted into the cohort.
+ insert_in_clickhouse: Whether the data should also be inserted into ClickHouse.
+ batchsize: Number of UUIDs to process in each batch.
+ team_id: The ID of the team to which the cohort belongs.
+ """
from posthog.models.cohort.util import get_static_cohort_size, insert_static_cohort
try:
@@ -321,13 +337,13 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool
for i in range(0, len(items), batchsize):
batch = items[i : i + batchsize]
persons_query = (
- Person.objects.filter(team_id=self.team_id).filter(uuid__in=batch).exclude(cohort__id=self.id)
+ Person.objects.filter(team_id=team_id).filter(uuid__in=batch).exclude(cohort__id=self.id)
)
if insert_in_clickhouse:
insert_static_cohort(
list(persons_query.values_list("uuid", flat=True)),
self.pk,
- self.team,
+ team_id=team_id,
)
sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params()
query = UPDATE_QUERY.format(
@@ -340,7 +356,7 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool
)
cursor.execute(query, params)
- count = get_static_cohort_size(self)
+ count = get_static_cohort_size(cohort_id=self.id, team_id=self.team_id)
self.count = count
self.is_calculating = False
@@ -357,12 +373,6 @@ def insert_users_list_by_uuid(self, items: list[str], insert_in_clickhouse: bool
self.save()
capture_exception(err)
- def _clickhouse_persons_query(self, batch_size=10000, offset=0):
- from posthog.models.cohort.util import get_person_ids_by_cohort_id
-
- uuids = get_person_ids_by_cohort_id(team=self.team, cohort_id=self.pk, limit=batch_size, offset=offset)
- return Person.objects.filter(uuid__in=uuids, team=self.team)
-
__repr__ = sane_repr("id", "name", "last_calculation")
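
To make the new keyword-only `team_id` parameters above concrete, here is a minimal usage sketch; the cohort, team, and distinct IDs are hypothetical, and the import path is assumed to match PostHog's Django models:

```python
# Sketch only: illustrates the signatures changed in cohort.py above.
from posthog.models import Cohort, Team

team = Team.objects.get(pk=1)  # hypothetical environment within the cohort's project
cohort = Cohort.objects.get(pk=42, team__project_id=team.project_id)

# Distinct IDs are now resolved against the passed team rather than cohort.team_id;
# omitting team_id keeps the old behaviour (defaults to the cohort's own team).
cohort.insert_users_by_list(["distinct-id-1", "distinct-id-2"], team_id=team.id)

# UUID-based insertion now requires team_id explicitly (keyword-only).
cohort.insert_users_list_by_uuid(
    ["018f1a2b-0000-0000-0000-000000000000"],
    insert_in_clickhouse=True,
    team_id=team.id,
)
```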
diff --git a/posthog/models/cohort/sql.py b/posthog/models/cohort/sql.py
index a84394bae94a8..603f8addf08a2 100644
--- a/posthog/models/cohort/sql.py
+++ b/posthog/models/cohort/sql.py
@@ -91,6 +91,8 @@
"""
STALE_COHORTPEOPLE = f"""
-SELECT count() FROM cohortpeople
-WHERE team_id = %(team_id)s AND cohort_id = %(cohort_id)s AND version < %(version)s
+SELECT team_id, count() AS stale_people_count FROM cohortpeople
+WHERE team_id IN %(team_ids)s AND cohort_id = %(cohort_id)s AND version < %(version)s
+GROUP BY team_id
+HAVING stale_people_count > 0
"""
diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py
index fe589236fa62e..395085453c5e3 100644
--- a/posthog/models/cohort/util.py
+++ b/posthog/models/cohort/util.py
@@ -34,13 +34,10 @@
STALE_COHORTPEOPLE,
)
from posthog.models.person.sql import (
- GET_LATEST_PERSON_SQL,
- GET_PERSON_IDS_BY_FILTER,
INSERT_PERSON_STATIC_COHORT,
PERSON_STATIC_COHORT_TABLE,
)
from posthog.models.property import Property, PropertyGroup
-from posthog.queries.insight import insight_sync_execute
from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query
# temporary marker to denote when cohortpeople table started being populated
@@ -75,14 +72,14 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext)
return query, params
-def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str:
+def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext, *, team: Team) -> str:
from posthog.hogql_queries.query_runner import get_query_runner
if not cohort.query:
raise ValueError("Cohort has no query")
query = get_query_runner(
- cast(dict, cohort.query), team=cast(Team, cohort.team), limit_context=LimitContext.COHORT_CALCULATION
+ cast(dict, cohort.query), team=team, limit_context=LimitContext.COHORT_CALCULATION
).to_query()
for select_query in extract_select_queries(query):
@@ -109,7 +106,7 @@ def print_cohort_hogql_query(cohort: Cohort, hogql_context: HogQLContext) -> str
hogql_context.enable_select_queries = True
hogql_context.limit_top_select = False
- create_default_modifiers_for_team(cohort.team, hogql_context.modifiers)
+ create_default_modifiers_for_team(team, hogql_context.modifiers)
return print_ast(query, context=hogql_context, dialect="clickhouse")
@@ -262,10 +259,7 @@ def format_filter_query(
def format_cohort_subquery(
- cohort: Cohort,
- index: int,
- hogql_context: HogQLContext,
- custom_match_field="person_id",
+ cohort: Cohort, index: int, hogql_context: HogQLContext, custom_match_field="person_id"
) -> tuple[str, dict[str, Any]]:
is_precalculated = is_precalculated_query(cohort)
if is_precalculated:
@@ -277,46 +271,13 @@ def format_cohort_subquery(
return person_query, params
-def get_person_ids_by_cohort_id(
- team: Team,
- cohort_id: int,
- limit: Optional[int] = None,
- offset: Optional[int] = None,
-):
- from posthog.models.property.util import parse_prop_grouped_clauses
-
- filter = Filter(data={"properties": [{"key": "id", "value": cohort_id, "type": "cohort"}]})
- filter_query, filter_params = parse_prop_grouped_clauses(
- team_id=team.pk,
- property_group=filter.property_groups,
- table_name="pdi",
- hogql_context=filter.hogql_context,
- )
-
- results = insight_sync_execute(
- GET_PERSON_IDS_BY_FILTER.format(
- person_query=GET_LATEST_PERSON_SQL,
- distinct_query=filter_query,
- query="",
- GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team.pk),
- offset="OFFSET %(offset)s" if offset else "",
- limit="ORDER BY _timestamp ASC LIMIT %(limit)s" if limit else "",
- ),
- {**filter_params, "team_id": team.pk, "offset": offset, "limit": limit},
- query_type="get_person_ids_by_cohort_id",
- team_id=team.pk,
- )
-
- return [str(row[0]) for row in results]
-
-
-def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int, team: Team):
+def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int, *, team_id: int):
persons = [
{
"id": str(uuid.uuid4()),
"person_id": str(person_uuid),
"cohort_id": cohort_id,
- "team_id": team.pk,
+ "team_id": team_id,
"_timestamp": datetime.now(),
}
for person_uuid in person_uuids
@@ -324,12 +285,12 @@ def insert_static_cohort(person_uuids: list[Optional[uuid.UUID]], cohort_id: int
sync_execute(INSERT_PERSON_STATIC_COHORT, persons)
-def get_static_cohort_size(cohort: Cohort) -> Optional[int]:
+def get_static_cohort_size(*, cohort_id: int, team_id: int) -> Optional[int]:
count_result = sync_execute(
GET_STATIC_COHORT_SIZE_SQL,
{
- "cohort_id": cohort.pk,
- "team_id": cohort.team_id,
+ "cohort_id": cohort_id,
+ "team_id": team_id,
},
)
@@ -342,22 +303,39 @@ def get_static_cohort_size(cohort: Cohort) -> Optional[int]:
def recalculate_cohortpeople(
cohort: Cohort, pending_version: int, *, initiating_user_id: Optional[int]
) -> Optional[int]:
- hogql_context = HogQLContext(within_non_hogql_query=True, team_id=cohort.team_id)
+ """
+ Recalculate cohort people for all environments of the project.
+ NOTE: Currently this only returns the count for the team where the cohort was created. Ideally it would return counts for all teams.
+ """
+ relevant_teams = Team.objects.order_by("id").filter(project_id=cohort.team.project_id)
+ count_by_team_id: dict[int, int] = {}
+ for team in relevant_teams:
+ count_for_team = _recalculate_cohortpeople_for_team(
+ cohort, pending_version, team, initiating_user_id=initiating_user_id
+ )
+ count_by_team_id[team.id] = count_for_team or 0
+ return count_by_team_id[cohort.team_id]
+
+
+def _recalculate_cohortpeople_for_team(
+ cohort: Cohort, pending_version: int, team: Team, *, initiating_user_id: Optional[int]
+) -> Optional[int]:
+ hogql_context = HogQLContext(within_non_hogql_query=True, team_id=team.id)
cohort_query, cohort_params = format_person_query(cohort, 0, hogql_context)
- before_count = get_cohort_size(cohort)
+ before_count = get_cohort_size(cohort, team_id=team.id)
if before_count:
logger.warn(
"Recalculating cohortpeople starting",
- team_id=cohort.team_id,
+ team_id=team.id,
cohort_id=cohort.pk,
size_before=before_count,
)
recalcluate_cohortpeople_sql = RECALCULATE_COHORT_BY_ID.format(cohort_filter=cohort_query)
- tag_queries(kind="cohort_calculation", team_id=cohort.team_id, query_type="CohortsQuery")
+ tag_queries(kind="cohort_calculation", team_id=team.id, query_type="CohortsQuery")
if initiating_user_id:
tag_queries(user_id=initiating_user_id)
@@ -367,7 +345,7 @@ def recalculate_cohortpeople(
**cohort_params,
**hogql_context.values,
"cohort_id": cohort.pk,
- "team_id": cohort.team_id,
+ "team_id": team.id,
"new_version": pending_version,
},
settings={
@@ -379,12 +357,12 @@ def recalculate_cohortpeople(
workload=Workload.OFFLINE,
)
- count = get_cohort_size(cohort, override_version=pending_version)
+ count = get_cohort_size(cohort, override_version=pending_version, team_id=team.id)
if count is not None and before_count is not None:
logger.warn(
"Recalculating cohortpeople done",
- team_id=cohort.team_id,
+ team_id=team.id,
cohort_id=cohort.pk,
size_before=before_count,
size=count,
@@ -395,33 +373,40 @@ def recalculate_cohortpeople(
def clear_stale_cohortpeople(cohort: Cohort, before_version: int) -> None:
if cohort.version and cohort.version > 0:
+ relevant_team_ids = list(Team.objects.filter(project_id=cohort.team.project_id).values_list("pk", flat=True))
stale_count_result = sync_execute(
STALE_COHORTPEOPLE,
{
"cohort_id": cohort.pk,
- "team_id": cohort.team_id,
+ "team_ids": relevant_team_ids,
"version": before_version,
},
)
- if stale_count_result and len(stale_count_result) and len(stale_count_result[0]):
- stale_count = stale_count_result[0][0]
- if stale_count > 0:
- # Don't do anything if it already exists
- AsyncDeletion.objects.get_or_create(
- deletion_type=DeletionType.Cohort_stale,
- team_id=cohort.team.pk,
- key=f"{cohort.pk}_{before_version}",
- )
+ team_ids_with_stale_cohortpeople = [row[0] for row in stale_count_result]
+ if team_ids_with_stale_cohortpeople:
+ AsyncDeletion.objects.bulk_create(
+ [
+ AsyncDeletion(
+ deletion_type=DeletionType.Cohort_stale,
+ team_id=team_id,
+ # Only appending `team_id` if it's not the same as the cohort's `team_id`, so that
+ # the migration to environments does not accidentally cause duplicate `AsyncDeletion`s
+ key=f"{cohort.pk}_{before_version}{('_' + str(team_id)) if team_id != cohort.team_id else ''}",
+ )
+ for team_id in team_ids_with_stale_cohortpeople
+ ],
+ ignore_conflicts=True,
+ )
-def get_cohort_size(cohort: Cohort, override_version: Optional[int] = None) -> Optional[int]:
+def get_cohort_size(cohort: Cohort, override_version: Optional[int] = None, *, team_id: int) -> Optional[int]:
count_result = sync_execute(
GET_COHORT_SIZE_SQL,
{
"cohort_id": cohort.pk,
"version": override_version if override_version is not None else cohort.version,
- "team_id": cohort.team_id,
+ "team_id": team_id,
},
workload=Workload.OFFLINE,
)
@@ -545,7 +530,7 @@ def get_dependent_cohorts(
continue
else:
current_cohort = Cohort.objects.db_manager(using_database).get(
- pk=cohort_id, team_id=cohort.team_id, deleted=False
+ pk=cohort_id, team__project_id=cohort.team.project_id, deleted=False
)
seen_cohorts_cache[cohort_id] = current_cohort
if current_cohort.id not in seen_cohort_ids:
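
A short sketch of how the reworked recalculation and stale-row cleanup above behave across a project's environments; the IDs are hypothetical and the comments restate the logic in the diff rather than adding new behaviour:

```python
# Hypothetical project with two environments: team 2 (the cohort's own) and team 5.
# recalculate_cohortpeople() now runs _recalculate_cohortpeople_for_team() once per team,
# but still returns only the count for the team where the cohort was created (team 2).
count = recalculate_cohortpeople(cohort, pending_version=4, initiating_user_id=None)

# clear_stale_cohortpeople() creates one AsyncDeletion per team that still has stale rows.
# Assuming cohort.pk == 7 and before_version == 3, the keys would look like:
#   team 2 (cohort's own team)   -> "7_3"    (legacy format, unchanged)
#   team 5 (another environment) -> "7_3_5"  (team ID appended to avoid duplicates)
```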
diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py
index c21af6a397117..beca926b7fbac 100644
--- a/posthog/models/feature_flag/feature_flag.py
+++ b/posthog/models/feature_flag/feature_flag.py
@@ -187,7 +187,7 @@ def transform_cohort_filters_for_easy_evaluation(
return self.conditions
else:
cohort = Cohort.objects.db_manager(using_database).get(
- pk=cohort_id, team_id=self.team_id, deleted=False
+ pk=cohort_id, team__project_id=self.team.project_id, deleted=False
)
seen_cohorts_cache[cohort_id] = cohort
except Cohort.DoesNotExist:
@@ -291,7 +291,7 @@ def get_cohort_ids(
continue
else:
cohort = Cohort.objects.db_manager(using_database).get(
- pk=cohort_id, team_id=self.team_id, deleted=False
+ pk=cohort_id, team__project_id=self.team.project_id, deleted=False
)
seen_cohorts_cache[cohort_id] = cohort
diff --git a/posthog/models/feature_flag/user_blast_radius.py b/posthog/models/feature_flag/user_blast_radius.py
index 712df09ed5002..bf08e8eed950a 100644
--- a/posthog/models/feature_flag/user_blast_radius.py
+++ b/posthog/models/feature_flag/user_blast_radius.py
@@ -77,7 +77,7 @@ def get_user_blast_radius(
if len(cohort_filters) == 1:
try:
- target_cohort = Cohort.objects.get(id=cohort_filters[0].value, team=team)
+ target_cohort = Cohort.objects.get(id=cohort_filters[0].value, team__project_id=team.project_id)
except Cohort.DoesNotExist:
pass
finally:
diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py
index b152e07113f11..01f3d2c4d4745 100644
--- a/posthog/models/filters/mixins/simplify.py
+++ b/posthog/models/filters/mixins/simplify.py
@@ -108,7 +108,7 @@ def _simplify_property(self, team: "Team", property: "Property", **kwargs) -> "P
from posthog.models.cohort.util import simplified_cohort_filter_properties
try:
- cohort = Cohort.objects.get(pk=property.value, team_id=team.pk)
+ cohort = Cohort.objects.get(pk=property.value, team__project_id=team.project_id)
except Cohort.DoesNotExist:
# :TODO: Handle non-existing resource in-query instead
return PropertyGroup(type=PropertyOperatorType.AND, values=[property])
diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py
index 90651b6cd1e5f..ef63b2f69c670 100644
--- a/posthog/models/property/util.py
+++ b/posthog/models/property/util.py
@@ -14,10 +14,7 @@
from rest_framework import exceptions
from posthog.clickhouse.kafka_engine import trim_quotes_expr
-from posthog.clickhouse.materialized_columns import (
- TableWithProperties,
- get_enabled_materialized_columns,
-)
+from posthog.clickhouse.materialized_columns import TableWithProperties, get_materialized_column_for_property
from posthog.constants import PropertyOperatorType
from posthog.hogql import ast
from posthog.hogql.hogql import HogQLContext
@@ -711,17 +708,18 @@ def get_property_string_expr(
(optional) alias of the table being queried
:return:
"""
- materialized_columns = get_enabled_materialized_columns(table) if allow_denormalized_props else {}
-
table_string = f"{table_alias}." if table_alias is not None and table_alias != "" else ""
if (
allow_denormalized_props
- and (property_name, materialised_table_column) in materialized_columns
+ and (
+ materialized_column := get_materialized_column_for_property(table, materialised_table_column, property_name)
+ )
+ and not materialized_column.is_nullable
and "group" not in materialised_table_column
):
return (
- f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"',
+ f'{table_string}"{materialized_column.name}"',
True,
)
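
The property/util change above swaps the dict lookup of enabled materialized columns for a single-column lookup; a hedged sketch of the pattern (the fallback expression is illustrative, not the exact SQL the helper builds):

```python
# Sketch: get_materialized_column_for_property returns a column object (or None)
# with .name and .is_nullable, as used in get_property_string_expr above.
column = get_materialized_column_for_property("events", "properties", "$browser")
if column is not None and not column.is_nullable:
    expr = f'"{column.name}"'  # read the materialized column directly
else:
    expr = "JSONExtractRaw(properties, '$browser')"  # illustrative JSON fallback
```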
diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py
index 8f4125be67a3c..e5649d6e812d9 100644
--- a/posthog/models/test/test_async_deletion_model.py
+++ b/posthog/models/test/test_async_deletion_model.py
@@ -365,7 +365,7 @@ def test_delete_auxilary_models_via_team(self):
group_key="org:5",
properties={},
)
- insert_static_cohort([uuid4()], 0, self.teams[0])
+ insert_static_cohort([uuid4()], 0, team_id=self.teams[0].pk)
self._insert_cohortpeople_row(self.teams[0], uuid4(), 3)
create_plugin_log_entry(
team_id=self.teams[0].pk,
@@ -403,7 +403,7 @@ def test_delete_auxilary_models_via_team_unrelated(self):
group_key="org:5",
properties={},
)
- insert_static_cohort([uuid4()], 0, self.teams[1])
+ insert_static_cohort([uuid4()], 0, team_id=self.teams[1].pk)
self._insert_cohortpeople_row(self.teams[1], uuid4(), 3)
create_plugin_log_entry(
team_id=self.teams[1].pk,
diff --git a/posthog/queries/column_optimizer/foss_column_optimizer.py b/posthog/queries/column_optimizer/foss_column_optimizer.py
index 4fffbd1faa350..c998d92480b5a 100644
--- a/posthog/queries/column_optimizer/foss_column_optimizer.py
+++ b/posthog/queries/column_optimizer/foss_column_optimizer.py
@@ -3,7 +3,7 @@
from typing import Union, cast
from collections.abc import Generator
-from posthog.clickhouse.materialized_columns import ColumnName, get_enabled_materialized_columns
+from posthog.clickhouse.materialized_columns import ColumnName, get_materialized_column_for_property
from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType
from posthog.models.action.util import (
get_action_tables_and_properties,
@@ -72,12 +72,14 @@ def columns_to_query(
table_column: str = "properties",
) -> set[ColumnName]:
"Transforms a list of property names to what columns are needed for that query"
-
- materialized_columns = get_enabled_materialized_columns(table)
- return {
- materialized_columns.get((property_name, table_column), table_column)
- for property_name, _, _ in used_properties
- }
+ column_names = set()
+ for property_name, _, _ in used_properties:
+ column = get_materialized_column_for_property(table, table_column, property_name)
+ if column is not None and not column.is_nullable:
+ column_names.add(column.name)
+ else:
+ column_names.add(table_column)
+ return column_names
@cached_property
def is_using_person_properties(self) -> bool:
diff --git a/posthog/queries/test/__snapshots__/test_trends.ambr b/posthog/queries/test/__snapshots__/test_trends.ambr
index 879da96b30821..01ab1c2e0e23e 100644
--- a/posthog/queries/test/__snapshots__/test_trends.ambr
+++ b/posthog/queries/test/__snapshots__/test_trends.ambr
@@ -22,11 +22,14 @@
# name: TestTrends.test_action_filtering_with_cohort.2
'''
/* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 2
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestTrends.test_action_filtering_with_cohort.3
@@ -110,11 +113,14 @@
# name: TestTrends.test_action_filtering_with_cohort_poe_v2.2
'''
/* celery:posthog.tasks.calculate_cohort.clear_stale_cohort */
- SELECT count()
+ SELECT team_id,
+ count() AS stale_people_count
FROM cohortpeople
- WHERE team_id = 99999
+ WHERE team_id IN [1, 2, 3, 4, 5 /* ... */]
AND cohort_id = 99999
AND version < 2
+ GROUP BY team_id
+ HAVING stale_people_count > 0
'''
# ---
# name: TestTrends.test_action_filtering_with_cohort_poe_v2.3
diff --git a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
index 88a534a569646..c25bdb4d587b4 100644
--- a/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
+++ b/posthog/session_recordings/test/__snapshots__/test_session_recordings.ambr
@@ -738,7 +738,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_get_session_recordings.24
@@ -865,7 +865,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_get_session_recordings.28
@@ -1637,7 +1637,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.100
@@ -1788,7 +1788,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.105
@@ -1915,7 +1915,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.109
@@ -2543,7 +2543,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.125
@@ -2670,7 +2670,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.129
@@ -3234,7 +3234,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.145
@@ -3361,7 +3361,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.149
@@ -3988,7 +3988,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.165
@@ -4115,7 +4115,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.169
@@ -4706,7 +4706,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.185
@@ -4833,7 +4833,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.189
@@ -5506,7 +5506,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.205
@@ -5633,7 +5633,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.209
@@ -5940,7 +5940,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.25
@@ -6067,7 +6067,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.29
@@ -6671,7 +6671,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.45
@@ -6798,7 +6798,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.49
@@ -7214,7 +7214,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.60
@@ -7365,7 +7365,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.65
@@ -7492,7 +7492,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.69
@@ -8116,7 +8116,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.85
@@ -8243,7 +8243,7 @@
"posthog_grouptypemapping"."name_singular",
"posthog_grouptypemapping"."name_plural"
FROM "posthog_grouptypemapping"
- WHERE "posthog_grouptypemapping"."team_id" = 99999
+ WHERE "posthog_grouptypemapping"."project_id" = 99999
'''
# ---
# name: TestSessionRecordings.test_listing_recordings_is_not_nplus1_for_persons.89
diff --git a/posthog/tasks/calculate_cohort.py b/posthog/tasks/calculate_cohort.py
index 53bc65d9abef6..be28173b5000f 100644
--- a/posthog/tasks/calculate_cohort.py
+++ b/posthog/tasks/calculate_cohort.py
@@ -1,20 +1,23 @@
import time
from typing import Any, Optional
+from django.conf import settings
+
+from posthog.models.team.team import Team
import structlog
from celery import shared_task
from dateutil.relativedelta import relativedelta
from django.db.models import F, ExpressionWrapper, DurationField, Q
from django.utils import timezone
from prometheus_client import Gauge
-from sentry_sdk import set_tag
+from sentry_sdk import capture_exception, set_tag
from datetime import timedelta
from posthog.api.monitoring import Feature
from posthog.models import Cohort
from posthog.models.cohort import get_and_update_pending_version
-from posthog.models.cohort.util import clear_stale_cohortpeople
+from posthog.models.cohort.util import clear_stale_cohortpeople, get_static_cohort_size
from posthog.models.user import User
COHORT_RECALCULATIONS_BACKLOG_GAUGE = Gauge(
@@ -109,37 +112,65 @@ def calculate_cohort_ch(cohort_id: int, pending_version: int, initiating_user_id
@shared_task(ignore_result=True, max_retries=1)
-def calculate_cohort_from_list(cohort_id: int, items: list[str]) -> None:
+def calculate_cohort_from_list(cohort_id: int, items: list[str], team_id: Optional[int] = None) -> None:
+ """
+ team_id is only optional for backwards compatibility with the old celery task signature.
+ All new tasks should pass team_id explicitly.
+ """
start_time = time.time()
cohort = Cohort.objects.get(pk=cohort_id)
+ if team_id is None:
+ team_id = cohort.team_id
- cohort.insert_users_by_list(items)
+ cohort.insert_users_by_list(items, team_id=team_id)
logger.warn("Calculating cohort {} from CSV took {:.2f} seconds".format(cohort.pk, (time.time() - start_time)))
@shared_task(ignore_result=True, max_retries=1)
-def insert_cohort_from_insight_filter(cohort_id: int, filter_data: dict[str, Any]) -> None:
- from posthog.api.cohort import (
- insert_cohort_actors_into_ch,
- insert_cohort_people_into_pg,
- )
+def insert_cohort_from_insight_filter(
+ cohort_id: int, filter_data: dict[str, Any], team_id: Optional[int] = None
+) -> None:
+ """
+ team_id is only optional for backwards compatibility with the old celery task signature.
+ All new tasks should pass team_id explicitly.
+ """
+ from posthog.api.cohort import insert_cohort_actors_into_ch, insert_cohort_people_into_pg
cohort = Cohort.objects.get(pk=cohort_id)
+ if team_id is None:
+ team_id = cohort.team_id
- insert_cohort_actors_into_ch(cohort, filter_data)
- insert_cohort_people_into_pg(cohort=cohort)
+ insert_cohort_actors_into_ch(cohort, filter_data, team_id=team_id)
+ insert_cohort_people_into_pg(cohort, team_id=team_id)
@shared_task(ignore_result=True, max_retries=1)
-def insert_cohort_from_query(cohort_id: int) -> None:
- from posthog.api.cohort import (
- insert_cohort_people_into_pg,
- insert_cohort_query_actors_into_ch,
- )
+def insert_cohort_from_query(cohort_id: int, team_id: Optional[int] = None) -> None:
+ """
+ team_id is only optional for backwards compatibility with the old celery task signature.
+ All new tasks should pass team_id explicitly.
+ """
+ from posthog.api.cohort import insert_cohort_people_into_pg, insert_cohort_query_actors_into_ch
cohort = Cohort.objects.get(pk=cohort_id)
- insert_cohort_query_actors_into_ch(cohort)
- insert_cohort_people_into_pg(cohort=cohort)
+ if team_id is None:
+ team_id = cohort.team_id
+ team = Team.objects.get(pk=team_id)
+ try:
+ insert_cohort_query_actors_into_ch(cohort, team=team)
+ insert_cohort_people_into_pg(cohort, team_id=team_id)
+ cohort.count = get_static_cohort_size(cohort_id=cohort.id, team_id=cohort.team_id)
+ cohort.errors_calculating = 0
+ cohort.last_calculation = timezone.now()
+ except:
+ cohort.errors_calculating = F("errors_calculating") + 1
+ cohort.last_error_at = timezone.now()
+ capture_exception()
+ if settings.DEBUG:
+ raise
+ finally:
+ cohort.is_calculating = False
+ cohort.save()
@shared_task(ignore_result=True, max_retries=1)
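These task signatures keep team_id optional so Celery messages enqueued before the deploy (with the old argument list) still deserialize; new callers pass it explicitly. A minimal sketch of the pattern, with a hypothetical do_recalculation helper standing in for the real work:

from typing import Optional

from celery import shared_task

from posthog.models import Cohort


@shared_task(ignore_result=True, max_retries=1)
def recalculate_cohort(cohort_id: int, team_id: Optional[int] = None) -> None:
    cohort = Cohort.objects.get(pk=cohort_id)
    if team_id is None:
        # Message was queued with the old signature; fall back to the cohort's own team.
        team_id = cohort.team_id
    do_recalculation(cohort, team_id=team_id)  # hypothetical helper, not part of the diff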
diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py
index 751b8f5db70cc..7657db26c203f 100644
--- a/posthog/tasks/exports/csv_exporter.py
+++ b/posthog/tasks/exports/csv_exporter.py
@@ -170,19 +170,21 @@ def _convert_response_to_csv_data(data: Any) -> Generator[Any, None, None]:
yield line
return
elif isinstance(first_result.get("data"), list):
+ is_comparison = first_result.get("compare_label")
+
+ # take date labels from current results, when comparing against previous
+ # as previous results will be indexed with offset
+ date_labels_item = next((x for x in results if x.get("compare_label") == "current"), None)
+
# TRENDS LIKE
for index, item in enumerate(results):
label = item.get("label", f"Series #{index + 1}")
compare_label = item.get("compare_label", "")
series_name = f"{label} - {compare_label}" if compare_label else label
- line = {"series": series_name}
- # take labels from current results, when comparing against previous
- if item.get("compare_label") == "previous":
- label_item = results[index - 1]
- else:
- label_item = item
+ line = {"series": series_name}
+ label_item = date_labels_item if is_comparison else item
action = item.get("action")
if isinstance(action, dict) and action.get("custom_name"):
diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py
index 29b57da6d7a0b..4d53742ed65bb 100644
--- a/posthog/tasks/exports/test/test_csv_exporter.py
+++ b/posthog/tasks/exports/test/test_csv_exporter.py
@@ -2,13 +2,13 @@
from typing import Any, Optional
from unittest import mock
from unittest.mock import MagicMock, Mock, patch, ANY
+from dateutil.relativedelta import relativedelta
from openpyxl import load_workbook
from io import BytesIO
import pytest
from boto3 import resource
from botocore.client import Config
-from dateutil.relativedelta import relativedelta
from django.test import override_settings
from django.utils.timezone import now
from requests.exceptions import HTTPError
@@ -703,23 +703,75 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
self,
) -> None:
_create_person(distinct_ids=[f"user_1"], team=self.team)
- events_by_person = {
- "user_1": [
- {
- "event": "$pageview",
- "timestamp": datetime(2023, 3, 21, 13, 46),
- },
- {
- "event": "$pageview",
- "timestamp": datetime(2023, 3, 21, 13, 46),
- },
- {
- "event": "$pageview",
- "timestamp": datetime(2023, 3, 22, 13, 47),
- },
- ],
- }
- journeys_for(events_by_person, self.team)
+
+ date = datetime(2023, 3, 21, 13, 46)
+ date_next_week = date + relativedelta(days=7)
+
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date,
+ properties={"$browser": "Safari"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date,
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date,
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date,
+ properties={"$browser": "Firefox"},
+ )
+
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date_next_week,
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date_next_week,
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date_next_week,
+ properties={"$browser": "Chrome"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date_next_week,
+ properties={"$browser": "Firefox"},
+ )
+ _create_event(
+ event="$pageview",
+ distinct_id="1",
+ team=self.team,
+ timestamp=date_next_week,
+ properties={"$browser": "Firefox"},
+ )
+
flush_persons_and_events()
exported_asset = ExportedAsset(
@@ -728,7 +780,10 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
export_context={
"source": {
"kind": "TrendsQuery",
- "dateRange": {"date_to": "2023-03-22", "date_from": "2023-03-22"},
+ "dateRange": {
+ "date_from": date.strftime("%Y-%m-%d"),
+ "date_to": date_next_week.strftime("%Y-%m-%d"),
+ },
"series": [
{
"kind": "EventsNode",
@@ -738,7 +793,8 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
},
],
"interval": "day",
- "compareFilter": {"compare": True},
+ "compareFilter": {"compare": True, "compare_to": "-1w"},
+ "breakdownFilter": {"breakdown": "$browser", "breakdown_type": "event"},
}
},
)
@@ -747,5 +803,17 @@ def test_csv_exporter_trends_query_with_compare_previous_option(
with self.settings(OBJECT_STORAGE_ENABLED=True, OBJECT_STORAGE_EXPORTS_FOLDER="Test-Exports"):
csv_exporter.export_tabular(exported_asset)
content = object_storage.read(exported_asset.content_location) # type: ignore
- lines = (content or "").strip().split("\r\n")
- self.assertEqual(lines, ["series,22-Mar-2023", "$pageview - current,1", "$pageview - previous,2"])
+
+ lines = (content or "").strip().splitlines()
+
+ expected_lines = [
+ "series,21-Mar-2023,22-Mar-2023,23-Mar-2023,24-Mar-2023,25-Mar-2023,26-Mar-2023,27-Mar-2023,28-Mar-2023",
+ "Chrome - current,2.0,0.0,0.0,0.0,0.0,0.0,0.0,3.0",
+ "Firefox - current,1.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0",
+ "Safari - current,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0",
+ "Chrome - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0",
+ "Firefox - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0",
+ "Safari - previous,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0",
+ ]
+
+ self.assertEqual(lines, expected_lines)
diff --git a/posthog/tasks/test/__snapshots__/test_usage_report.ambr b/posthog/tasks/test/__snapshots__/test_usage_report.ambr
index 2230c532da5ca..36733da586c57 100644
--- a/posthog/tasks/test/__snapshots__/test_usage_report.ambr
+++ b/posthog/tasks/test/__snapshots__/test_usage_report.ambr
@@ -3,7 +3,7 @@
'''
SELECT team_id,
- multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', JSONExtractString(properties, '$lib') = 'web', 'web_events', JSONExtractString(properties, '$lib') = 'js', 'web_lite_events', JSONExtractString(properties, '$lib') = 'posthog-node', 'node_events', JSONExtractString(properties, '$lib') = 'posthog-android', 'android_events', JSONExtractString(properties, '$lib') = 'posthog-flutter', 'flutter_events', JSONExtractString(properties, '$lib') = 'posthog-ios', 'ios_events', JSONExtractString(properties, '$lib') = 'posthog-go', 'go_events', JSONExtractString(properties, '$lib') = 'posthog-java', 'java_events', JSONExtractString(properties, '$lib') = 'posthog-react-native', 'react_native_events', JSONExtractString(properties, '$lib') = 'posthog-ruby', 'ruby_events', JSONExtractString(properties, '$lib') = 'posthog-python', 'python_events', JSONExtractString(properties, '$lib') = 'posthog-php', 'php_events', 'other') AS metric,
+ multiIf(event LIKE 'helicone%', 'helicone_events', event LIKE 'langfuse%', 'langfuse_events', event LIKE 'keywords_ai%', 'keywords_ai_events', event LIKE 'traceloop%', 'traceloop_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'web', 'web_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'js', 'web_lite_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-node', 'node_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-android', 'android_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-flutter', 'flutter_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-ios', 'ios_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-go', 'go_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-java', 'java_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-react-native', 'react_native_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-ruby', 'ruby_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-python', 'python_events', replaceRegexpAll(JSONExtractRaw(properties, '$lib'), '^"|"$', '') = 'posthog-php', 'php_events', 'other') AS metric,
count(1) as count
FROM events
WHERE timestamp BETWEEN '2022-01-10 00:00:00' AND '2022-01-10 23:59:59'
diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py
index 968354fff3032..6964668c5fffc 100644
--- a/posthog/tasks/usage_report.py
+++ b/posthog/tasks/usage_report.py
@@ -19,7 +19,6 @@
from posthog import version_requirement
from posthog.clickhouse.client.connection import Workload
-from posthog.clickhouse.materialized_columns import get_enabled_materialized_columns
from posthog.client import sync_execute
from posthog.cloud_utils import get_cached_instance_license, is_cloud
from posthog.constants import FlagRequestType
@@ -29,6 +28,7 @@
from posthog.models.feature_flag import FeatureFlag
from posthog.models.organization import Organization
from posthog.models.plugin import PluginConfig
+from posthog.models.property.util import get_property_string_expr
from posthog.models.team.team import Team
from posthog.models.utils import namedtuplefetchall
from posthog.settings import CLICKHOUSE_CLUSTER, INSTANCE_TAG
@@ -460,10 +460,8 @@ def get_teams_with_event_count_with_groups_in_period(begin: datetime, end: datet
@timed_log()
@retry(tries=QUERY_RETRIES, delay=QUERY_RETRY_DELAY, backoff=QUERY_RETRY_BACKOFF)
def get_all_event_metrics_in_period(begin: datetime, end: datetime) -> dict[str, list[tuple[int, int]]]:
- materialized_columns = get_enabled_materialized_columns("events")
-
# Check if $lib is materialized
- lib_expression = materialized_columns.get(("$lib", "properties"), "JSONExtractString(properties, '$lib')")
+ lib_expression, _ = get_property_string_expr("events", "$lib", "'$lib'", "properties")
results = sync_execute(
f"""
diff --git a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
index 41b8ceec1ef41..ae81f9fa61fe6 100644
--- a/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
+++ b/posthog/temporal/data_imports/pipelines/sql_database/__init__.py
@@ -21,6 +21,9 @@
from posthog.warehouse.models.external_data_source import ExternalDataSource
from sqlalchemy.sql import text
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+
from .helpers import (
table_rows,
engine_from_credentials,
@@ -111,8 +114,11 @@ def sql_source_for_type(
def snowflake_source(
account_id: str,
- user: str,
- password: str,
+ user: Optional[str],
+ password: Optional[str],
+ passphrase: Optional[str],
+ private_key: Optional[str],
+ auth_type: str,
database: str,
warehouse: str,
schema: str,
@@ -122,13 +128,6 @@ def snowflake_source(
incremental_field: Optional[str] = None,
incremental_field_type: Optional[IncrementalFieldType] = None,
) -> DltSource:
- account_id = quote(account_id)
- user = quote(user)
- password = quote(password)
- database = quote(database)
- warehouse = quote(warehouse)
- role = quote(role) if role else None
-
if incremental_field is not None and incremental_field_type is not None:
incremental: dlt.sources.incremental | None = dlt.sources.incremental(
cursor_path=incremental_field, initial_value=incremental_type_to_initial_value(incremental_field_type)
@@ -136,9 +135,46 @@ def snowflake_source(
else:
incremental = None
- credentials = ConnectionStringCredentials(
- f"snowflake://{user}:{password}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}"
- )
+ if auth_type == "password" and user is not None and password is not None:
+ account_id = quote(account_id)
+ user = quote(user)
+ password = quote(password)
+ database = quote(database)
+ warehouse = quote(warehouse)
+ role = quote(role) if role else None
+
+ credentials = create_engine(
+ f"snowflake://{user}:{password}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}"
+ )
+ else:
+ assert private_key is not None
+ assert user is not None
+
+ account_id = quote(account_id)
+ user = quote(user)
+ database = quote(database)
+ warehouse = quote(warehouse)
+ role = quote(role) if role else None
+
+ p_key = serialization.load_pem_private_key(
+ private_key.encode("utf-8"),
+ password=passphrase.encode() if passphrase is not None else None,
+ backend=default_backend(),
+ )
+
+ pkb = p_key.private_bytes(
+ encoding=serialization.Encoding.DER,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+
+ credentials = create_engine(
+ f"snowflake://{user}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}",
+ connect_args={
+ "private_key": pkb,
+ },
+ )
+
db_source = sql_database(
credentials=credentials,
schema=schema,
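With key-pair auth, the PEM key is converted to unencrypted PKCS#8 DER bytes and passed to the Snowflake SQLAlchemy dialect through connect_args instead of putting a password in the URL. A condensed sketch of that path (assumes the snowflake-sqlalchemy dialect is installed; names are placeholders):

from urllib.parse import quote

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from sqlalchemy import create_engine


def snowflake_engine_from_key(account_id, user, private_key_pem, passphrase,
                              database, schema, warehouse):
    p_key = serialization.load_pem_private_key(
        private_key_pem.encode("utf-8"),
        password=passphrase.encode() if passphrase else None,
        backend=default_backend(),
    )
    # The connector expects the key as unencrypted PKCS#8 DER bytes.
    pkb = p_key.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption(),
    )
    url = (
        f"snowflake://{quote(user)}@{quote(account_id)}/"
        f"{quote(database)}/{schema}?warehouse={quote(warehouse)}"
    )
    return create_engine(url, connect_args={"private_key": pkb})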
diff --git a/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py b/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py
index 92f4d1f87cec4..a3fc1c6b2838b 100644
--- a/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py
+++ b/posthog/temporal/data_imports/pipelines/sql_database_v2/__init__.py
@@ -20,6 +20,9 @@
from posthog.warehouse.models import ExternalDataSource
from posthog.warehouse.types import IncrementalFieldType
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import serialization
+
from .helpers import (
SelectAny,
table_rows,
@@ -127,8 +130,11 @@ def sql_source_for_type(
def snowflake_source(
account_id: str,
- user: str,
- password: str,
+ user: Optional[str],
+ password: Optional[str],
+ passphrase: Optional[str],
+ private_key: Optional[str],
+ auth_type: str,
database: str,
warehouse: str,
schema: str,
@@ -138,13 +144,6 @@ def snowflake_source(
incremental_field: Optional[str] = None,
incremental_field_type: Optional[IncrementalFieldType] = None,
) -> DltSource:
- account_id = quote(account_id)
- user = quote(user)
- password = quote(password)
- database = quote(database)
- warehouse = quote(warehouse)
- role = quote(role) if role else None
-
if incremental_field is not None and incremental_field_type is not None:
incremental: dlt.sources.incremental | None = dlt.sources.incremental(
cursor_path=incremental_field, initial_value=incremental_type_to_initial_value(incremental_field_type)
@@ -152,9 +151,46 @@ def snowflake_source(
else:
incremental = None
- credentials = ConnectionStringCredentials(
- f"snowflake://{user}:{password}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}"
- )
+ if auth_type == "password" and user is not None and password is not None:
+ account_id = quote(account_id)
+ user = quote(user)
+ password = quote(password)
+ database = quote(database)
+ warehouse = quote(warehouse)
+ role = quote(role) if role else None
+
+ credentials = create_engine(
+ f"snowflake://{user}:{password}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}"
+ )
+ else:
+ assert private_key is not None
+ assert user is not None
+
+ account_id = quote(account_id)
+ user = quote(user)
+ database = quote(database)
+ warehouse = quote(warehouse)
+ role = quote(role) if role else None
+
+ p_key = serialization.load_pem_private_key(
+ private_key.encode("utf-8"),
+ password=passphrase.encode() if passphrase is not None else None,
+ backend=default_backend(),
+ )
+
+ pkb = p_key.private_bytes(
+ encoding=serialization.Encoding.DER,
+ format=serialization.PrivateFormat.PKCS8,
+ encryption_algorithm=serialization.NoEncryption(),
+ )
+
+ credentials = create_engine(
+ f"snowflake://{user}@{account_id}/{database}/{schema}?warehouse={warehouse}{f'&role={role}' if role else ''}",
+ connect_args={
+ "private_key": pkb,
+ },
+ )
+
db_source = sql_database(
credentials=credentials,
schema=schema,
diff --git a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
index e308e19965bdd..37b65c4c68fad 100644
--- a/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
+++ b/posthog/temporal/data_imports/workflow_activities/import_data_sync.py
@@ -270,17 +270,24 @@ def import_data_activity_sync(inputs: ImportDataActivityInputs):
)
account_id = model.pipeline.job_inputs.get("account_id")
- user = model.pipeline.job_inputs.get("user")
- password = model.pipeline.job_inputs.get("password")
database = model.pipeline.job_inputs.get("database")
warehouse = model.pipeline.job_inputs.get("warehouse")
sf_schema = model.pipeline.job_inputs.get("schema")
role = model.pipeline.job_inputs.get("role")
+ auth_type = model.pipeline.job_inputs.get("auth_type", "password")
+ auth_type_username = model.pipeline.job_inputs.get("user")
+ auth_type_password = model.pipeline.job_inputs.get("password")
+ auth_type_passphrase = model.pipeline.job_inputs.get("passphrase")
+ auth_type_private_key = model.pipeline.job_inputs.get("private_key")
+
source = snowflake_source(
account_id=account_id,
- user=user,
- password=password,
+ auth_type=auth_type,
+ user=auth_type_username,
+ password=auth_type_password,
+ private_key=auth_type_private_key,
+ passphrase=auth_type_passphrase,
database=database,
schema=sf_schema,
warehouse=warehouse,
diff --git a/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py b/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py
index 67f8c820e2837..b63d7ea869e16 100644
--- a/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py
+++ b/posthog/temporal/data_imports/workflow_activities/sync_new_schemas.py
@@ -86,14 +86,29 @@ def sync_new_schemas_activity(inputs: SyncNewSchemasActivityInputs) -> None:
return
account_id = source.job_inputs.get("account_id")
- user = source.job_inputs.get("user")
- password = source.job_inputs.get("password")
database = source.job_inputs.get("database")
warehouse = source.job_inputs.get("warehouse")
sf_schema = source.job_inputs.get("schema")
role = source.job_inputs.get("role")
- sql_schemas = get_snowflake_schemas(account_id, database, warehouse, user, password, sf_schema, role)
+ auth_type = source.job_inputs.get("auth_type", "password")
+ auth_type_username = source.job_inputs.get("user")
+ auth_type_password = source.job_inputs.get("password")
+ auth_type_passphrase = source.job_inputs.get("passphrase")
+ auth_type_private_key = source.job_inputs.get("private_key")
+
+ sql_schemas = get_snowflake_schemas(
+ account_id=account_id,
+ database=database,
+ warehouse=warehouse,
+ user=auth_type_username,
+ password=auth_type_password,
+ schema=sf_schema,
+ role=role,
+ auth_type=auth_type,
+ passphrase=auth_type_passphrase,
+ private_key=auth_type_private_key,
+ )
schemas_to_sync = list(sql_schemas.keys())
else:
diff --git a/posthog/test/base.py b/posthog/test/base.py
index 43dcc0e130964..53f4932f2898f 100644
--- a/posthog/test/base.py
+++ b/posthog/test/base.py
@@ -30,7 +30,6 @@
from posthog import rate_limit, redis
from posthog.clickhouse.client import sync_execute
from posthog.clickhouse.client.connection import ch_pool
-from posthog.clickhouse.materialized_columns import get_materialized_columns
from posthog.clickhouse.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL
from posthog.cloud_utils import TEST_clear_instance_license_cache
from posthog.models import Dashboard, DashboardTile, Insight, Organization, Team, User
@@ -121,6 +120,8 @@ def clean_varying_query_parts(query, replace_all_numbers):
else:
query = re.sub(r"(team|cohort)_id(\"?) = \d+", r"\1_id\2 = 99999", query)
+ query = re.sub(r"(team|cohort)_id(\"?) IN \(\d+(, ?\d+)*\)", r"\1_id\2 IN (1, 2, 3, 4, 5 /* ... */)", query)
+ query = re.sub(r"(team|cohort)_id(\"?) IN \[\d+(, ?\d+)*\]", r"\1_id\2 IN [1, 2, 3, 4, 5 /* ... */]", query)
query = re.sub(r"\d+ as (team|cohort)_id(\"?)", r"99999 as \1_id\2", query)
# feature flag conditions use primary keys as columns in queries, so replace those always
query = re.sub(r"flag_\d+_condition", r"flag_X_condition", query)
@@ -575,35 +576,31 @@ def stripResponse(response, remove=("action", "label", "persons_urls", "filter")
return response
-def default_materialised_columns():
+def cleanup_materialized_columns():
try:
+ from ee.clickhouse.materialized_columns.columns import get_materialized_columns
from ee.clickhouse.materialized_columns.test.test_columns import EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS
except:
# EE not available? Skip
- return []
-
- default_columns = []
- for prop in EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS:
- column_name = get_materialized_columns("events")[(prop, "properties")]
- default_columns.append(column_name)
-
- return default_columns
-
+ return
-def cleanup_materialized_columns():
def optionally_drop(table, filter=None):
drops = ",".join(
[
- f"DROP COLUMN {column_name}"
- for column_name in get_materialized_columns(table).values()
- if filter is None or filter(column_name)
+ f"DROP COLUMN {column.name}"
+ for column in get_materialized_columns(table).values()
+ if filter is None or filter(column.name)
]
)
if drops:
sync_execute(f"ALTER TABLE {table} {drops} SETTINGS mutations_sync = 2")
- default_columns = default_materialised_columns()
- optionally_drop("events", lambda name: name not in default_columns)
+ default_column_names = {
+ get_materialized_columns("events")[(prop, "properties")].name
+ for prop in EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS
+ }
+
+ optionally_drop("events", lambda name: name not in default_column_names)
optionally_drop("person")
optionally_drop("groups")
diff --git a/posthog/urls.py b/posthog/urls.py
index 078a66c5af8b3..e0f79123a10f4 100644
--- a/posthog/urls.py
+++ b/posthog/urls.py
@@ -238,6 +238,8 @@ def opt_slash_path(route: str, view: Callable, name: Optional[str] = None) -> UR
path("year_in_posthog/2022//", year_in_posthog.render_2022),
path("year_in_posthog/2023/", year_in_posthog.render_2023),
path("year_in_posthog/2023//", year_in_posthog.render_2023),
+ path("year_in_posthog/2024/", year_in_posthog.render_2024),
+ path("year_in_posthog/2024//", year_in_posthog.render_2024),
]
if settings.DEBUG:
diff --git a/posthog/utils.py b/posthog/utils.py
index d8ea9315fec7c..5a22bfcdde9ff 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -348,7 +348,7 @@ def render_template(
context["js_url"] = get_js_url(request)
try:
- year_in_hog_url = f"/year_in_posthog/2023/{str(request.user.uuid)}" # type: ignore
+ year_in_hog_url = f"/year_in_posthog/2024/{str(request.user.uuid)}" # type: ignore
except:
year_in_hog_url = None
diff --git a/posthog/warehouse/api/external_data_schema.py b/posthog/warehouse/api/external_data_schema.py
index 55a4447907021..9391268bb69d0 100644
--- a/posthog/warehouse/api/external_data_schema.py
+++ b/posthog/warehouse/api/external_data_schema.py
@@ -350,14 +350,23 @@ def incremental_fields(self, request: Request, *args: Any, **kwargs: Any):
sf_schema = source.job_inputs.get("schema")
role = source.job_inputs.get("role")
+ auth_type = source.job_inputs.get("auth_type", "password")
+ auth_type_username = source.job_inputs.get("user")
+ auth_type_password = source.job_inputs.get("password")
+ auth_type_passphrase = source.job_inputs.get("passphrase")
+ auth_type_private_key = source.job_inputs.get("private_key")
+
sf_schemas = get_snowflake_schemas(
account_id=account_id,
database=database,
warehouse=warehouse,
- user=user,
- password=password,
+ user=auth_type_username,
+ password=auth_type_password,
schema=sf_schema,
role=role,
+ auth_type=auth_type,
+ passphrase=auth_type_passphrase,
+ private_key=auth_type_private_key,
)
columns = sf_schemas.get(instance.name, [])
diff --git a/posthog/warehouse/api/external_data_source.py b/posthog/warehouse/api/external_data_source.py
index 02e3c68a77422..28b1ebda1bf2e 100644
--- a/posthog/warehouse/api/external_data_source.py
+++ b/posthog/warehouse/api/external_data_source.py
@@ -640,10 +640,15 @@ def _handle_snowflake_source(
database = payload.get("database")
warehouse = payload.get("warehouse")
role = payload.get("role")
- user = payload.get("user")
- password = payload.get("password")
schema = payload.get("schema")
+ auth_type_obj = payload.get("auth_type", {})
+ auth_type = auth_type_obj.get("selection", None)
+ auth_type_username = auth_type_obj.get("username", None)
+ auth_type_password = auth_type_obj.get("password", None)
+ auth_type_passphrase = auth_type_obj.get("passphrase", None)
+ auth_type_private_key = auth_type_obj.get("private_key", None)
+
new_source_model = ExternalDataSource.objects.create(
source_id=str(uuid.uuid4()),
connection_id=str(uuid.uuid4()),
@@ -656,14 +661,28 @@ def _handle_snowflake_source(
"database": database,
"warehouse": warehouse,
"role": role,
- "user": user,
- "password": password,
"schema": schema,
+ "auth_type": auth_type,
+ "user": auth_type_username,
+ "password": auth_type_password,
+ "passphrase": auth_type_passphrase,
+ "private_key": auth_type_private_key,
},
prefix=prefix,
)
- schemas = get_snowflake_schemas(account_id, database, warehouse, user, password, schema, role)
+ schemas = get_snowflake_schemas(
+ account_id=account_id,
+ database=database,
+ warehouse=warehouse,
+ user=auth_type_username,
+ password=auth_type_password,
+ schema=schema,
+ role=role,
+ passphrase=auth_type_passphrase,
+ private_key=auth_type_private_key,
+ auth_type=auth_type,
+ )
return new_source_model, list(schemas.keys())
@@ -1068,20 +1087,48 @@ def database_schema(self, request: Request, *arg: Any, **kwargs: Any):
database = request.data.get("database")
warehouse = request.data.get("warehouse")
role = request.data.get("role")
- user = request.data.get("user")
- password = request.data.get("password")
schema = request.data.get("schema")
- if not account_id or not warehouse or not database or not user or not password or not schema:
+ auth_type_obj = request.data.get("auth_type", {})
+ auth_type = auth_type_obj.get("selection", None)
+ auth_type_username = auth_type_obj.get("username", None)
+ auth_type_password = auth_type_obj.get("password", None)
+ auth_type_passphrase = auth_type_obj.get("passphrase", None)
+ auth_type_private_key = auth_type_obj.get("private_key", None)
+
+ if not account_id or not warehouse or not database or not schema:
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST,
+ data={"message": "Missing required parameters: account id, warehouse, database, schema"},
+ )
+
+ if auth_type == "password" and (not auth_type_username or not auth_type_password):
+ return Response(
+ status=status.HTTP_400_BAD_REQUEST,
+ data={"message": "Missing required parameters: username, password"},
+ )
+
+ if auth_type == "keypair" and (
+ not auth_type_passphrase or not auth_type_private_key or not auth_type_username
+ ):
return Response(
status=status.HTTP_400_BAD_REQUEST,
- data={
- "message": "Missing required parameters: account id, warehouse, database, user, password, schema"
- },
+ data={"message": "Missing required parameters: passphrase, private key"},
)
try:
- result = get_snowflake_schemas(account_id, database, warehouse, user, password, schema, role)
+ result = get_snowflake_schemas(
+ account_id=account_id,
+ database=database,
+ warehouse=warehouse,
+ user=auth_type_username,
+ password=auth_type_password,
+ schema=schema,
+ role=role,
+ passphrase=auth_type_passphrase,
+ private_key=auth_type_private_key,
+ auth_type=auth_type,
+ )
if len(result.keys()) == 0:
return Response(
status=status.HTTP_400_BAD_REQUEST,
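The database_schema endpoint now validates the nested auth_type payload before connecting: password auth needs username and password, key-pair auth needs username, private key, and passphrase. A small sketch of that validation as a standalone function (the payload keys match the diff; the helper itself is illustrative):

def missing_snowflake_auth_fields(payload: dict) -> list[str]:
    auth = payload.get("auth_type") or {}
    selection = auth.get("selection")
    if selection == "password":
        required = ("username", "password")
    elif selection == "keypair":
        required = ("username", "private_key", "passphrase")
    else:
        required = ()
    return [field for field in required if not auth.get(field)]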
diff --git a/posthog/warehouse/models/external_data_schema.py b/posthog/warehouse/models/external_data_schema.py
index fae744be0795c..b9629c6410672 100644
--- a/posthog/warehouse/models/external_data_schema.py
+++ b/posthog/warehouse/models/external_data_schema.py
@@ -1,5 +1,7 @@
from collections import defaultdict
from datetime import datetime, timedelta
+import tempfile
+import os
from typing import Any, Optional
from django.db import models
from django_deprecate_fields import deprecate_field
@@ -97,6 +99,22 @@ def soft_delete(self):
self.deleted_at = datetime.now()
self.save()
+ def update_incremental_field_last_value(self, last_value: Any) -> None:
+ incremental_field_type = self.sync_type_config.get("incremental_field_type")
+
+ last_value_py = last_value.item() if isinstance(last_value, numpy.generic) else last_value
+
+ if (
+ incremental_field_type == IncrementalFieldType.Integer
+ or incremental_field_type == IncrementalFieldType.Numeric
+ ):
+ last_value_json = last_value_py
+ else:
+ last_value_json = str(last_value_py)
+
+ self.sync_type_config["incremental_field_last_value"] = last_value_json
+ self.save()
+
@database_sync_to_async
def asave_external_data_schema(schema: ExternalDataSchema) -> None:
@@ -218,16 +236,43 @@ def filter_snowflake_incremental_fields(columns: list[tuple[str, str]]) -> list[
def get_snowflake_schemas(
- account_id: str, database: str, warehouse: str, user: str, password: str, schema: str, role: Optional[str] = None
+ account_id: str,
+ database: str,
+ warehouse: str,
+ user: Optional[str],
+ password: Optional[str],
+ passphrase: Optional[str],
+ private_key: Optional[str],
+ auth_type: str,
+ schema: str,
+ role: Optional[str] = None,
) -> dict[str, list[tuple[str, str]]]:
+ auth_connect_args: dict[str, str | None] = {}
+ file_name: str | None = None
+
+ if auth_type == "keypair" and private_key is not None:
+ with tempfile.NamedTemporaryFile(delete=False) as tf:
+ tf.write(private_key.encode("utf-8"))
+ file_name = tf.name
+
+ auth_connect_args = {
+ "user": user,
+ "private_key_file": file_name,
+ "private_key_file_pwd": passphrase,
+ }
+ else:
+ auth_connect_args = {
+ "password": password,
+ "user": user,
+ }
+
with snowflake.connector.connect(
- user=user,
- password=password,
account=account_id,
warehouse=warehouse,
database=database,
schema="information_schema",
role=role,
+ **auth_connect_args,
) as connection:
with connection.cursor() as cursor:
if cursor is None:
@@ -243,7 +288,10 @@ def get_snowflake_schemas(
for row in result:
schema_list[row[0]].append((row[1], row[2]))
- return schema_list
+ if file_name is not None:
+ os.unlink(file_name)
+
+ return schema_list
def filter_postgres_incremental_fields(columns: list[tuple[str, str]]) -> list[tuple[str, IncrementalFieldType]]:
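For schema introspection the code talks to snowflake.connector directly, so the key-pair path writes the PEM to a temporary file and hands the path over as private_key_file. A trimmed sketch of that connection setup (cleanup wrapped in try/finally here; the keyword arguments match the ones used above):

import os
import tempfile

import snowflake.connector


def connect_with_keypair(account_id, user, private_key_pem, passphrase,
                         warehouse, database, role=None):
    # snowflake.connector takes the key as a file path, so the PEM is written
    # to a throwaway temp file for the duration of the connection attempt.
    with tempfile.NamedTemporaryFile(delete=False) as tf:
        tf.write(private_key_pem.encode("utf-8"))
        key_path = tf.name
    try:
        return snowflake.connector.connect(
            account=account_id,
            user=user,
            private_key_file=key_path,
            private_key_file_pwd=passphrase,
            warehouse=warehouse,
            database=database,
            schema="information_schema",
            role=role,
        )
    finally:
        os.unlink(key_path)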
diff --git a/posthog/year_in_posthog/2023.html b/posthog/year_in_posthog/2024.html
similarity index 99%
rename from posthog/year_in_posthog/2023.html
rename to posthog/year_in_posthog/2024.html
index 5604fb0c8fbcb..cb52db77ad278 100644
--- a/posthog/year_in_posthog/2023.html
+++ b/posthog/year_in_posthog/2024.html
@@ -24,7 +24,7 @@
posthog.init('{{api_token}}', {
api_host: window.location.origin, loaded: function (posthog) {
posthog.capture('year in posthog viewed', {
- 'yearInPostHog': 2023, 'badge': '{{badge}}', {% for stat in stats %}
+ 'yearInPostHog': 2024, 'badge': '{{badge}}', {% for stat in stats %}
'{{ stat.description }}': {{ stat.count }},
{% endfor %}
});
diff --git a/posthog/year_in_posthog/calculate_2023.py b/posthog/year_in_posthog/calculate_2024.py
similarity index 92%
rename from posthog/year_in_posthog/calculate_2023.py
rename to posthog/year_in_posthog/calculate_2024.py
index 03428d2711d30..a260318cccb9c 100644
--- a/posthog/year_in_posthog/calculate_2023.py
+++ b/posthog/year_in_posthog/calculate_2024.py
@@ -18,7 +18,7 @@
posthog_dashboarditem
WHERE
NOT created_by_id IS NULL
- AND date_part('year', created_at) = 2023
+ AND date_part('year', created_at) = 2024
AND (
name IS NOT NULL
OR derived_name IS NOT NULL
@@ -41,7 +41,7 @@
key not ilike 'survey-targeting%%'
AND key not ilike 'prompt-%%'
AND key not ilike 'interview-%%'
- AND date_part('year', created_at) = 2023
+ AND date_part('year', created_at) = 2024
AND created_by_id = (select id from posthog_user where uuid = %(user_uuid)s)
GROUP BY
created_by_id
@@ -56,7 +56,7 @@
FROM
posthog_sessionrecordingviewed
WHERE
- date_part('year', created_at) = 2023
+ date_part('year', created_at) = 2024
AND user_id = (select id from posthog_user where uuid = %(user_uuid)s)
GROUP BY
user_id
@@ -71,7 +71,7 @@
FROM
posthog_experiment
WHERE
- date_part('year', created_at) = 2023
+ date_part('year', created_at) = 2024
AND created_by_id = (select id from posthog_user where uuid = %(user_uuid)s)
GROUP BY
created_by_id
@@ -86,7 +86,7 @@
FROM
posthog_dashboard
WHERE
- date_part('year', created_at) = 2023
+ date_part('year', created_at) = 2024
AND created_by_id = (select id from posthog_user where uuid = %(user_uuid)s)
GROUP BY
created_by_id
@@ -102,7 +102,7 @@
posthog_survey
WHERE
NOT created_by_id IS NULL
- AND date_part('year', created_at) = 2023
+ AND date_part('year', created_at) = 2024
AND created_by_id = (select id from posthog_user where uuid = %(user_uuid)s)
group by
created_by_id
@@ -111,7 +111,7 @@
id,
array_remove(
ARRAY [
- case when posthog_user.last_login >= '1-Jan-2023' then 'astronaut' end,
+ case when posthog_user.last_login >= '1-Jan-2024' then 'astronaut' end,
insight_stats.badge,
flag_stats.badge,
recording_viewed_stats.badge,
@@ -147,7 +147,7 @@ def dictfetchall(cursor):
@cache_for(timedelta(seconds=0 if settings.DEBUG else 30))
-def calculate_year_in_posthog_2023(user_uuid: str) -> Optional[dict]:
+def calculate_year_in_posthog_2024(user_uuid: str) -> Optional[dict]:
with connection.cursor() as cursor:
cursor.execute(query, {"user_uuid": user_uuid})
rows = dictfetchall(cursor)
diff --git a/posthog/year_in_posthog/hibernating.html b/posthog/year_in_posthog/hibernating.html
index 873f10528be13..c7e57048087a9 100644
--- a/posthog/year_in_posthog/hibernating.html
+++ b/posthog/year_in_posthog/hibernating.html
@@ -24,7 +24,7 @@
posthog.init('{{api_token}}', {
api_host: window.location.origin, loaded: function (posthog) {
posthog.capture('year in posthog hibernating viewed', {
- 'yearInPostHog': 2022
+ 'yearInPostHog': 2023
});
}
})
diff --git a/posthog/year_in_posthog/sharing-buttons.html b/posthog/year_in_posthog/sharing-buttons.html
index 269f25cb456b3..8cabf76a97c19 100644
--- a/posthog/year_in_posthog/sharing-buttons.html
+++ b/posthog/year_in_posthog/sharing-buttons.html
@@ -1,8 +1,8 @@
Share to