diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 8849e6aefe280..7344b83aef40b 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -218,7 +218,7 @@ jobs: - name: Run clippy if: needs.changes.outputs.rust == 'true' - run: cargo clippy -- -D warnings + run: cargo clippy --all-targets --all-features -- -D warnings - name: Run cargo check if: needs.changes.outputs.rust == 'true' diff --git a/cypress/e2e/notebooks-insights.ts b/cypress/e2e/notebooks-insights.ts index 0b007744576c6..9c564692810aa 100644 --- a/cypress/e2e/notebooks-insights.ts +++ b/cypress/e2e/notebooks-insights.ts @@ -5,7 +5,18 @@ describe('Notebooks', () => { cy.clickNavMenu('notebooks') cy.location('pathname').should('include', '/notebooks') }) - ;['SQL', 'TRENDS', 'FUNNELS', 'RETENTION', 'PATHS', 'STICKINESS', 'LIFECYCLE'].forEach((insightType) => { + + it(`Can add a HogQL insight`, () => { + savedInsights.createNewInsightOfType('SQL') + insight.editName('SQL Insight') + insight.save() + cy.get('[data-attr="notebooks-add-button"]').click() + cy.get('[data-attr="notebooks-select-button-create"]').click() + cy.get('.ErrorBoundary').should('not.exist') + // Detect if table settings are present. They shouldn't appear in the block, but rather on side. 
+ cy.get('[data-attr="notebook-node-query"]').get('[data-attr="export-button"]').should('not.exist') + }) + ;['TRENDS', 'FUNNELS', 'RETENTION', 'PATHS', 'STICKINESS', 'LIFECYCLE'].forEach((insightType) => { it(`Can add a ${insightType} insight`, () => { savedInsights.createNewInsightOfType(insightType) insight.editName(`${insightType} Insight`) diff --git a/docker/clickhouse/user_defined_function.xml b/docker/clickhouse/user_defined_function.xml index b8fac26d1887c..b48169884a53f 100644 --- a/docker/clickhouse/user_defined_function.xml +++ b/docker/clickhouse/user_defined_function.xml @@ -138,7 +138,7 @@ executable_pool aggregate_funnel_trends - Array(Tuple(UInt64, Int8, Nullable(String))) + Array(Tuple(UInt64, Int8, Nullable(String), UUID)) result UInt8 @@ -169,7 +169,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Nullable(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Nullable(String), Array(Int8))) value JSONEachRow @@ -181,7 +181,7 @@ executable_pool aggregate_funnel_array_trends - Array(Tuple(UInt64, Int8, Array(String))) + Array(Tuple(UInt64, Int8, Array(String), UUID)) result UInt8 @@ -208,7 +208,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow @@ -220,7 +220,7 @@ executable_pool aggregate_funnel_cohort_trends - Array(Tuple(UInt64, Int8, UInt64)) + Array(Tuple(UInt64, Int8, UInt64, UUID)) result UInt8 @@ -247,7 +247,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, UInt64, Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, UInt64, Array(Int8))) value JSONEachRow @@ -285,7 +285,7 @@ prop_vals - Array(Tuple(Nullable(Float64), UInt64, Array(String), Array(Int8))) + Array(Tuple(Nullable(Float64), UInt64, UUID, Array(String), Array(Int8))) value JSONEachRow diff --git a/ee/clickhouse/queries/experiments/funnel_experiment_result.py 
b/ee/clickhouse/queries/experiments/funnel_experiment_result.py index e311657cc52c7..f68816ed3b129 100644 --- a/ee/clickhouse/queries/experiments/funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/funnel_experiment_result.py @@ -13,6 +13,7 @@ calculate_credible_intervals, calculate_probabilities, ) +from posthog.models.experiment import ExperimentHoldout from posthog.models.feature_flag import FeatureFlag from posthog.models.filters.filter import Filter from posthog.models.team import Team @@ -54,10 +55,13 @@ def __init__( feature_flag: FeatureFlag, experiment_start_date: datetime, experiment_end_date: Optional[datetime] = None, + holdout: Optional[ExperimentHoldout] = None, funnel_class: type[ClickhouseFunnel] = ClickhouseFunnel, ): breakdown_key = f"$feature/{feature_flag.key}" self.variants = [variant["key"] for variant in feature_flag.variants] + if holdout: + self.variants.append(f"holdout-{holdout.id}") # our filters assume that the given time ranges are in the project timezone. # while start and end date are in UTC. 
diff --git a/ee/clickhouse/queries/experiments/trend_experiment_result.py b/ee/clickhouse/queries/experiments/trend_experiment_result.py index 0971120f2366a..ac9508d21051c 100644 --- a/ee/clickhouse/queries/experiments/trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/trend_experiment_result.py @@ -22,6 +22,7 @@ calculate_credible_intervals, calculate_probabilities, ) +from posthog.models.experiment import ExperimentHoldout from posthog.models.feature_flag import FeatureFlag from posthog.models.filters.filter import Filter from posthog.models.team import Team @@ -81,9 +82,12 @@ def __init__( experiment_end_date: Optional[datetime] = None, trend_class: type[Trends] = Trends, custom_exposure_filter: Optional[Filter] = None, + holdout: Optional[ExperimentHoldout] = None, ): breakdown_key = f"$feature/{feature_flag.key}" self.variants = [variant["key"] for variant in feature_flag.variants] + if holdout: + self.variants.append(f"holdout-{holdout.id}") # our filters assume that the given time ranges are in the project timezone. # while start and end date are in UTC. 
diff --git a/ee/clickhouse/views/experiment_holdouts.py b/ee/clickhouse/views/experiment_holdouts.py new file mode 100644 index 0000000000000..c7d8eff83ce5a --- /dev/null +++ b/ee/clickhouse/views/experiment_holdouts.py @@ -0,0 +1,110 @@ +from typing import Any +from rest_framework import serializers, viewsets +from rest_framework.exceptions import ValidationError +from rest_framework.request import Request +from rest_framework.response import Response +from django.db import transaction + + +from posthog.api.feature_flag import FeatureFlagSerializer +from posthog.api.routing import TeamAndOrgViewSetMixin +from posthog.api.shared import UserBasicSerializer +from posthog.models.experiment import ExperimentHoldout + + +class ExperimentHoldoutSerializer(serializers.ModelSerializer): + created_by = UserBasicSerializer(read_only=True) + + class Meta: + model = ExperimentHoldout + fields = [ + "id", + "name", + "description", + "filters", + "created_by", + "created_at", + "updated_at", + ] + read_only_fields = [ + "id", + "created_by", + "created_at", + "updated_at", + ] + + def _get_filters_with_holdout_id(self, id: int, filters: list) -> list: + variant_key = f"holdout-{id}" + updated_filters = [] + for filter in filters: + updated_filters.append( + { + **filter, + "variant": variant_key, + } + ) + return updated_filters + + def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> ExperimentHoldout: + request = self.context["request"] + validated_data["created_by"] = request.user + validated_data["team_id"] = self.context["team_id"] + + if not validated_data.get("filters"): + raise ValidationError("Filters are required to create an holdout group") + + instance = super().create(validated_data) + instance.filters = self._get_filters_with_holdout_id(instance.id, instance.filters) + instance.save() + return instance + + def update(self, instance: ExperimentHoldout, validated_data): + filters = validated_data.get("filters") + if filters and instance.filters != 
filters: + # update flags on all experiments in this holdout group + new_filters = self._get_filters_with_holdout_id(instance.id, filters) + validated_data["filters"] = new_filters + with transaction.atomic(): + for experiment in instance.experiment_set.all(): + flag = experiment.feature_flag + existing_flag_serializer = FeatureFlagSerializer( + flag, + data={ + "filters": {**flag.filters, "holdout_groups": validated_data["filters"]}, + }, + partial=True, + context=self.context, + ) + existing_flag_serializer.is_valid(raise_exception=True) + existing_flag_serializer.save() + + return super().update(instance, validated_data) + + +class ExperimentHoldoutViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): + scope_object = "experiment" + queryset = ExperimentHoldout.objects.prefetch_related("created_by").all() + serializer_class = ExperimentHoldoutSerializer + ordering = "-created_at" + + def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: + instance = self.get_object() + + with transaction.atomic(): + for experiment in instance.experiment_set.all(): + flag = experiment.feature_flag + existing_flag_serializer = FeatureFlagSerializer( + flag, + data={ + "filters": { + **flag.filters, + "holdout_groups": None, + } + }, + partial=True, + context={"request": request, "team": self.team, "team_id": self.team_id}, + ) + existing_flag_serializer.is_valid(raise_exception=True) + existing_flag_serializer.save() + + return super().destroy(request, *args, **kwargs) diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index 7aed519d29ee6..6df24dc012cea 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -19,6 +19,7 @@ ClickhouseTrendExperimentResult, ) from ee.clickhouse.queries.experiments.utils import requires_flag_warning +from ee.clickhouse.views.experiment_holdouts import ExperimentHoldoutSerializer from posthog.api.cohort import CohortSerializer from posthog.api.feature_flag 
import FeatureFlagSerializer, MinimalFeatureFlagSerializer from posthog.api.routing import TeamAndOrgViewSetMixin @@ -27,7 +28,7 @@ from posthog.caching.insight_cache import update_cached_state from posthog.clickhouse.query_tagging import tag_queries from posthog.constants import INSIGHT_TRENDS -from posthog.models.experiment import Experiment +from posthog.models.experiment import Experiment, ExperimentHoldout from posthog.models.filters.filter import Filter from posthog.utils import generate_cache_key, get_safe_cache @@ -50,6 +51,7 @@ def _calculate_experiment_results(experiment: Experiment, refresh: bool = False) experiment.feature_flag, experiment.start_date, experiment.end_date, + holdout=experiment.holdout, custom_exposure_filter=exposure_filter, ).get_results() else: @@ -59,6 +61,7 @@ def _calculate_experiment_results(experiment: Experiment, refresh: bool = False) experiment.feature_flag, experiment.start_date, experiment.end_date, + holdout=experiment.holdout, ).get_results() return _experiment_results_cached( @@ -156,6 +159,10 @@ class ExperimentSerializer(serializers.ModelSerializer): feature_flag_key = serializers.CharField(source="get_feature_flag_key") created_by = UserBasicSerializer(read_only=True) feature_flag = MinimalFeatureFlagSerializer(read_only=True) + holdout = ExperimentHoldoutSerializer(read_only=True) + holdout_id = serializers.PrimaryKeyRelatedField( + queryset=ExperimentHoldout.objects.all(), source="holdout", required=False, allow_null=True + ) class Meta: model = Experiment @@ -167,6 +174,8 @@ class Meta: "end_date", "feature_flag_key", "feature_flag", + "holdout", + "holdout_id", "exposure_cohort", "parameters", "secondary_metrics", @@ -183,6 +192,7 @@ class Meta: "updated_at", "feature_flag", "exposure_cohort", + "holdout", ] def validate_parameters(self, value): @@ -221,6 +231,10 @@ def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Experiment: if properties: raise ValidationError("Experiments do not support global 
filter properties") + holdout_groups = None + if validated_data.get("holdout"): + holdout_groups = validated_data["holdout"].filters + default_variants = [ {"key": "control", "name": "Control Group", "rollout_percentage": 50}, {"key": "test", "name": "Test Variant", "rollout_percentage": 50}, @@ -230,6 +244,7 @@ def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Experiment: "groups": [{"properties": properties, "rollout_percentage": 100}], "multivariate": {"variants": variants or default_variants}, "aggregation_group_type_index": aggregation_group_type_index, + "holdout_groups": holdout_groups, } feature_flag_serializer = FeatureFlagSerializer( @@ -263,6 +278,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg "parameters", "archived", "secondary_metrics", + "holdout", } given_keys = set(validated_data.keys()) extra_keys = given_keys - expected_keys @@ -273,7 +289,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg if extra_keys: raise ValidationError(f"Can't update keys: {', '.join(sorted(extra_keys))} on Experiment") - # if an experiment has launched, we cannot edit its variants anymore. + # if an experiment has launched, we cannot edit its variants or holdout anymore. 
if not instance.is_draft: if "feature_flag_variants" in validated_data.get("parameters", {}): if len(validated_data["parameters"]["feature_flag_variants"]) != len(feature_flag.variants): @@ -285,13 +301,19 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg != 1 ): raise ValidationError("Can't update feature_flag_variants on Experiment") + if "holdout" in validated_data and validated_data["holdout"] != instance.holdout: + raise ValidationError("Can't update holdout on running Experiment") properties = validated_data.get("filters", {}).get("properties") if properties: raise ValidationError("Experiments do not support global filter properties") if instance.is_draft: - # if feature flag variants have changed, update the feature flag. + # if feature flag variants or holdout have changed, update the feature flag. + holdout_groups = instance.holdout.filters if instance.holdout else None + if "holdout" in validated_data: + holdout_groups = validated_data["holdout"].filters if validated_data["holdout"] else None + if validated_data.get("parameters"): variants = validated_data["parameters"].get("feature_flag_variants", []) aggregation_group_type_index = validated_data["parameters"].get("aggregation_group_type_index") @@ -312,6 +334,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg "groups": [{"properties": properties, "rollout_percentage": 100}], "multivariate": {"variants": variants or default_variants}, "aggregation_group_type_index": aggregation_group_type_index, + "holdout_groups": holdout_groups, } existing_flag_serializer = FeatureFlagSerializer( @@ -322,6 +345,17 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg ) existing_flag_serializer.is_valid(raise_exception=True) existing_flag_serializer.save() + else: + # no parameters provided, just update the holdout if necessary + if "holdout" in validated_data: + existing_flag_serializer = FeatureFlagSerializer( + 
feature_flag, + data={"filters": {**feature_flag.filters, "holdout_groups": holdout_groups}}, + partial=True, + context=self.context, + ) + existing_flag_serializer.is_valid(raise_exception=True) + existing_flag_serializer.save() if instance.is_draft and has_start_date: feature_flag.active = True @@ -336,7 +370,7 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg class EnterpriseExperimentsViewSet(TeamAndOrgViewSetMixin, viewsets.ModelViewSet): scope_object = "experiment" serializer_class = ExperimentSerializer - queryset = Experiment.objects.prefetch_related("feature_flag", "created_by").all() + queryset = Experiment.objects.prefetch_related("feature_flag", "created_by", "holdout").all() ordering = "-created_at" # ****************************************** diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py b/ee/clickhouse/views/test/test_clickhouse_experiments.py index fdd0c05656c7c..7377c76eaef5a 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -123,6 +123,192 @@ def test_creating_updating_basic_experiment(self): self.assertEqual(experiment.description, "Bazinga") self.assertEqual(experiment.end_date.strftime("%Y-%m-%dT%H:%M"), end_date) + def test_transferring_holdout_to_another_group(self): + response = self.client.post( + f"/api/projects/{self.team.id}/experiment_holdouts/", + data={ + "name": "Test Experiment holdout", + "filters": [ + { + "properties": [], + "rollout_percentage": 20, + "variant": "holdout", + } + ], + }, + format="json", + ) + + holdout_id = response.json()["id"] + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.json()["name"], "Test Experiment holdout") + self.assertEqual( + response.json()["filters"], + [{"properties": [], "rollout_percentage": 20, "variant": f"holdout-{holdout_id}"}], + ) + + # Generate draft experiment to be part of holdout + ff_key = 
"a-b-tests" + response = self.client.post( + f"/api/projects/{self.team.id}/experiments/", + { + "name": "Test Experiment", + "description": "", + "start_date": None, + "end_date": None, + "feature_flag_key": ff_key, + "parameters": None, + "filters": { + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], + "properties": [], + }, + "holdout_id": holdout_id, + }, + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.json()["name"], "Test Experiment") + self.assertEqual(response.json()["feature_flag_key"], ff_key) + + created_ff = FeatureFlag.objects.get(key=ff_key) + + self.assertEqual(created_ff.key, ff_key) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 20, "variant": f"holdout-{holdout_id}"}], + ) + + exp_id = response.json()["id"] + + # new holdout, and update experiment + response = self.client.post( + f"/api/projects/{self.team.id}/experiment_holdouts/", + data={ + "name": "Test Experiment holdout 2", + "filters": [ + { + "properties": [], + "rollout_percentage": 5, + "variant": "holdout", + } + ], + }, + format="json", + ) + holdout_2_id = response.json()["id"] + + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"holdout_id": holdout_2_id}, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + experiment = Experiment.objects.get(pk=exp_id) + self.assertEqual(experiment.holdout_id, holdout_2_id) + + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 5, "variant": f"holdout-{holdout_2_id}"}], + ) + + # update parameters + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + { + "parameters": { + "feature_flag_variants": [ + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + 
"name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, + ] + }, + }, + ) + + experiment = Experiment.objects.get(pk=exp_id) + self.assertEqual(experiment.holdout_id, holdout_2_id) + + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 5, "variant": f"holdout-{holdout_2_id}"}], + ) + self.assertEqual( + created_ff.filters["multivariate"]["variants"], + [ + {"key": "control", "name": "Control Group", "rollout_percentage": 33}, + {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, + {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + ], + ) + + # remove holdouts + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"holdout_id": None}, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + experiment = Experiment.objects.get(pk=exp_id) + self.assertEqual(experiment.holdout_id, None) + + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual(created_ff.filters["holdout_groups"], None) + + # try adding invalid holdout + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"holdout_id": 123456}, + ) + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.json()["detail"], 'Invalid pk "123456" - object does not exist.') + + # add back holdout + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"holdout_id": holdout_2_id}, + ) + + # launch experiment and try updating holdouts again + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"start_date": "2021-12-01T10:23"}, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + + response = self.client.patch( + f"/api/projects/{self.team.id}/experiments/{exp_id}", + {"holdout_id": 
holdout_id}, + ) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.json()["detail"], "Can't update holdout on running Experiment") + + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 5, "variant": f"holdout-{holdout_2_id}"}], + ) + def test_adding_behavioral_cohort_filter_to_experiment_fails(self): cohort = Cohort.objects.create( team=self.team, @@ -1119,6 +1305,7 @@ def test_create_experiment_updates_feature_flag_cache(self): ] }, "aggregation_group_type_index": None, + "holdout_groups": None, }, ) @@ -1170,6 +1357,7 @@ def test_create_experiment_updates_feature_flag_cache(self): ] }, "aggregation_group_type_index": None, + "holdout_groups": None, }, ) @@ -1237,6 +1425,7 @@ def test_create_experiment_updates_feature_flag_cache(self): ] }, "aggregation_group_type_index": None, + "holdout_groups": None, }, ) diff --git a/ee/clickhouse/views/test/test_experiment_holdouts.py b/ee/clickhouse/views/test/test_experiment_holdouts.py new file mode 100644 index 0000000000000..4d067d14835f4 --- /dev/null +++ b/ee/clickhouse/views/test/test_experiment_holdouts.py @@ -0,0 +1,145 @@ +from rest_framework import status + +from ee.api.test.base import APILicensedTest +from posthog.models.experiment import Experiment +from posthog.models.feature_flag import FeatureFlag + + +class TestExperimentHoldoutCRUD(APILicensedTest): + def test_can_list_experiment_holdouts(self): + response = self.client.get(f"/api/projects/{self.team.id}/experiment_holdouts/") + self.assertEqual(response.status_code, status.HTTP_200_OK) + + def test_create_update_experiment_holdouts(self) -> None: + response = self.client.post( + f"/api/projects/{self.team.id}/experiment_holdouts/", + data={ + "name": "Test Experiment holdout", + "filters": [ + { + "properties": [], + "rollout_percentage": 20, + "variant": "holdout", + } + ], + }, + format="json", + ) + + 
holdout_id = response.json()["id"] + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.json()["name"], "Test Experiment holdout") + self.assertEqual( + response.json()["filters"], + [{"properties": [], "rollout_percentage": 20, "variant": f"holdout-{holdout_id}"}], + ) + + # Generate experiment to be part of holdout + ff_key = "a-b-tests" + response = self.client.post( + f"/api/projects/{self.team.id}/experiments/", + { + "name": "Test Experiment", + "description": "", + "start_date": "2021-12-01T10:23", + "end_date": None, + "feature_flag_key": ff_key, + "parameters": None, + "filters": { + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], + "properties": [], + }, + "holdout_id": holdout_id, + }, + ) + + self.assertEqual(response.status_code, status.HTTP_201_CREATED) + self.assertEqual(response.json()["name"], "Test Experiment") + self.assertEqual(response.json()["feature_flag_key"], ff_key) + + created_ff = FeatureFlag.objects.get(key=ff_key) + + self.assertEqual(created_ff.key, ff_key) + self.assertEqual(created_ff.filters["multivariate"]["variants"][0]["key"], "control") + self.assertEqual(created_ff.filters["multivariate"]["variants"][1]["key"], "test") + self.assertEqual(created_ff.filters["groups"][0]["properties"], []) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 20, "variant": f"holdout-{holdout_id}"}], + ) + + exp_id = response.json()["id"] + # Now try updating holdout + response = self.client.patch( + f"/api/projects/{self.team.id}/experiment_holdouts/{holdout_id}", + { + "name": "Test Experiment holdout 2", + "filters": [ + { + "properties": [], + "rollout_percentage": 30, + "variant": "holdout", + } + ], + }, + ) + + self.assertEqual(response.status_code, status.HTTP_200_OK) + self.assertEqual(response.json()["name"], "Test Experiment holdout 2") + self.assertEqual( + response.json()["filters"], + [{"properties": 
[], "rollout_percentage": 30, "variant": f"holdout-{holdout_id}"}], + ) + + # make sure flag for experiment in question was updated as well + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual( + created_ff.filters["holdout_groups"], + [{"properties": [], "rollout_percentage": 30, "variant": f"holdout-{holdout_id}"}], + ) + + # now delete holdout + response = self.client.delete(f"/api/projects/{self.team.id}/experiment_holdouts/{holdout_id}") + self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) + + # make sure flag for experiment in question was updated as well + created_ff = FeatureFlag.objects.get(key=ff_key) + self.assertEqual(created_ff.filters["holdout_groups"], None) + + # and same for experiment + exp = Experiment.objects.get(pk=exp_id) + self.assertEqual(exp.holdout, None) + + def test_invalid_create(self): + response = self.client.post( + f"/api/projects/{self.team.id}/experiment_holdouts/", + data={ + "name": None, # invalid + "filters": [ + { + "properties": [], + "rollout_percentage": 20, + "variant": "holdout", + } + ], + }, + format="json", + ) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.json()["detail"], "This field may not be null.") + + response = self.client.post( + f"/api/projects/{self.team.id}/experiment_holdouts/", + data={ + "name": "xyz", + "filters": [], + }, + format="json", + ) + + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) + self.assertEqual(response.json()["detail"], "Filters are required to create an holdout group") diff --git a/ee/hogai/assistant.py b/ee/hogai/assistant.py new file mode 100644 index 0000000000000..e47020fdcdf04 --- /dev/null +++ b/ee/hogai/assistant.py @@ -0,0 +1,104 @@ +from collections.abc import Generator +from typing import Any, Literal, TypedDict, TypeGuard, Union, cast + +from langchain_core.messages import AIMessageChunk +from langfuse.callback import CallbackHandler +from langgraph.graph.state 
import StateGraph + +from ee import settings +from ee.hogai.trends.nodes import CreateTrendsPlanNode, CreateTrendsPlanToolsNode, GenerateTrendsNode +from ee.hogai.utils import AssistantNodeName, AssistantState, Conversation +from posthog.models.team.team import Team +from posthog.schema import VisualizationMessage + +if settings.LANGFUSE_PUBLIC_KEY: + langfuse_handler = CallbackHandler( + public_key=settings.LANGFUSE_PUBLIC_KEY, secret_key=settings.LANGFUSE_SECRET_KEY, host=settings.LANGFUSE_HOST + ) +else: + langfuse_handler = None + + +def is_value_update(update: list[Any]) -> TypeGuard[tuple[Literal["values"], dict[AssistantNodeName, Any]]]: + """ + Transition between nodes. + """ + return len(update) == 2 and update[0] == "updates" + + +class LangGraphState(TypedDict): + langgraph_node: AssistantNodeName + + +def is_message_update( + update: list[Any], +) -> TypeGuard[tuple[Literal["messages"], tuple[Union[AIMessageChunk, Any], LangGraphState]]]: + """ + Streaming of messages. Returns a partial state. 
+ """ + return len(update) == 2 and update[0] == "messages" + + +class Assistant: + _team: Team + _graph: StateGraph + + def __init__(self, team: Team): + self._team = team + self._graph = StateGraph(AssistantState) + + def _compile_graph(self): + builder = self._graph + + create_trends_plan_node = CreateTrendsPlanNode(self._team) + builder.add_node(CreateTrendsPlanNode.name, create_trends_plan_node.run) + + create_trends_plan_tools_node = CreateTrendsPlanToolsNode(self._team) + builder.add_node(CreateTrendsPlanToolsNode.name, create_trends_plan_tools_node.run) + + generate_trends_node = GenerateTrendsNode(self._team) + builder.add_node(GenerateTrendsNode.name, generate_trends_node.run) + + builder.add_edge(AssistantNodeName.START, create_trends_plan_node.name) + builder.add_conditional_edges(create_trends_plan_node.name, create_trends_plan_node.router) + builder.add_conditional_edges(create_trends_plan_tools_node.name, create_trends_plan_tools_node.router) + builder.add_conditional_edges(GenerateTrendsNode.name, generate_trends_node.router) + + return builder.compile() + + def stream(self, conversation: Conversation) -> Generator[str, None, None]: + assistant_graph = self._compile_graph() + callbacks = [langfuse_handler] if langfuse_handler else [] + messages = [message.root for message in conversation.messages] + + generator = assistant_graph.stream( + {"messages": messages}, + config={"recursion_limit": 24, "callbacks": callbacks}, + stream_mode=["messages", "updates"], + ) + + chunks = AIMessageChunk(content="") + + # Send a chunk to establish the connection avoiding the worker's timeout. 
+ yield "" + + for update in generator: + if is_value_update(update): + _, state_update = update + if ( + AssistantNodeName.GENERATE_TRENDS in state_update + and "messages" in state_update[AssistantNodeName.GENERATE_TRENDS] + ): + message = cast(VisualizationMessage, state_update[AssistantNodeName.GENERATE_TRENDS]["messages"][0]) + yield message.model_dump_json() + elif is_message_update(update): + langchain_message, langgraph_state = update[1] + if langgraph_state["langgraph_node"] == AssistantNodeName.GENERATE_TRENDS and isinstance( + langchain_message, AIMessageChunk + ): + chunks += langchain_message # type: ignore + parsed_message = GenerateTrendsNode.parse_output(chunks.tool_calls[0]["args"]) + if parsed_message: + yield VisualizationMessage( + reasoning_steps=parsed_message.reasoning_steps, answer=parsed_message.answer + ).model_dump_json() diff --git a/ee/hogai/generate_trends_agent.py b/ee/hogai/generate_trends_agent.py deleted file mode 100644 index 9980ff82dbeba..0000000000000 --- a/ee/hogai/generate_trends_agent.py +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Literal, Optional - -from langchain_core.output_parsers.openai_tools import PydanticToolsParser -from langchain_core.prompts import ChatPromptTemplate -from langchain_openai import ChatOpenAI -from pydantic import BaseModel, Field - -from ee.hogai.system_prompt import trends_system_prompt -from ee.hogai.team_prompt import TeamPrompt -from ee.hogai.trends_function import TrendsFunction -from posthog.models.team.team import Team -from posthog.schema import ExperimentalAITrendsQuery - - -class output_insight_schema(BaseModel): - reasoning_steps: Optional[list[str]] = None - answer: ExperimentalAITrendsQuery - - -class ChatMessage(BaseModel): - role: Literal["user", "assistant"] - content: str = Field(..., max_length=2500) - - -class Conversation(BaseModel): - messages: list[ChatMessage] = Field(..., max_length=20) - session_id: str - - -class GenerateTrendsAgent: - _team: Team - - def 
__init__(self, team: Team): - self._team = team - - def bootstrap(self, messages: list[ChatMessage], user_prompt: str | None = None): - llm = ChatOpenAI(model="gpt-4o-2024-08-06", stream_usage=True).bind_tools( - [TrendsFunction().generate_function()], tool_choice="output_insight_schema" - ) - user_prompt = ( - user_prompt - or "Answer to my question:\n{{question}}\n" + TeamPrompt(self._team).generate_prompt() - ) - - prompts = ChatPromptTemplate.from_messages( - [ - ("system", trends_system_prompt), - ("user", user_prompt), - *[(message.role, message.content) for message in messages[1:]], - ], - template_format="mustache", - ) - - chain = prompts | llm | PydanticToolsParser(tools=[output_insight_schema]) # type: ignore - return chain diff --git a/ee/hogai/hardcoded_definitions.py b/ee/hogai/hardcoded_definitions.py index ee13c49c3ca63..166c53bf87c0c 100644 --- a/ee/hogai/hardcoded_definitions.py +++ b/ee/hogai/hardcoded_definitions.py @@ -54,7 +54,7 @@ }, "$identify": { "label": "Identify", - "description": "A user has been identified with properties", + "description": "Identifies an anonymous user. This event doesn't show how many users you have but rather how many users used an account.", }, "$create_alias": { "label": "Alias", @@ -915,8 +915,8 @@ "session_properties": { "$session_duration": { "label": "Session duration", - "description": "The duration of the session being tracked. 
Learn more about how PostHog tracks sessions in our documentation.\n\nNote, if the duration is formatted as a single number (not 'HH:MM:SS'), it's in seconds.", - "examples": ["01:04:12"], + "description": "The duration of the session being tracked in seconds.", + "examples": ["30", "146", "2"], "type": "Numeric", }, "$start_timestamp": { diff --git a/ee/hogai/system_prompt.py b/ee/hogai/system_prompt.py deleted file mode 100644 index fb00b35825867..0000000000000 --- a/ee/hogai/system_prompt.py +++ /dev/null @@ -1,77 +0,0 @@ -trends_system_prompt = """ -As a recognized head of product growth acting as a top-tier data engineer, your task is to write queries of trends insights for customers using a JSON schema. - -Follow these instructions to create a query: -* Identify the events or actions the user wants to analyze. -* Determine types of entities that user wants to analyze like events, persons, groups, sessions, cohorts, etc. -* Determine a vistualization type that best suits the user's needs. -* Determine if the user wants to name the series or use the default names. -* Choose the date range and the interval the user wants to analyze. -* Determine if the user wants to compare the results to a previous period or use smoothing. -* Determine if the user wants to use property filters for all series. -* Determine math types for all series. -* Determine property filters for individual series. -* Check operators of property filters for individual and all series. Make sure the operators correspond to the user's request. You may need to use "contains" for strings if you're not sure about the exact value. -* Determine if the user wants to use a breakdown filter. -* Determine if the user wants to filter out internal and test users. If the user didn't specify, filter out internal and test users by default. -* Determine if the user wants to use sampling factor. -* Determine if it's useful to show a legend, values of series, units, y-axis scale type, etc. 
-* Use your judgement if there are any other parameters that the user might want to adjust that aren't listed here. - -Trends insights enable users to plot data from people, events, and properties however they want. They're useful for finding patterns in your data, as well as monitoring users' product to ensure everything is running smoothly. For example, using trends, users can analyze: -- How product's most important metrics change over time. -- Long-term patterns, or cycles in product's usage. -- How a specific change affects usage. -- The usage of different features side-by-side. -- How the properties of events vary using aggregation (sum, average, etc). -- Users can also visualize the same data points in a variety of ways. - -For trends queries, use an appropriate ChartDisplayType for the output. For example: -- if the user wants to see a dynamics in time like a line graph, use `ActionsLineGraph`. -- if the user wants to see cumulative dynamics across time, use `ActionsLineGraphCumulative`. -- if the user asks a question where you can answer with a single number, use `BoldNumber`. -- if the user wants a table, use `ActionsTable`. -- if the data is categorical, use `ActionsBar`. -- if the data is easy to understand in a pie chart, use `ActionsPie`. -- if the user has only one series and they want to see data from particular countries, use `WorldMap`. - -The user might want to get insights for groups. A group aggregates events based on entities, such as organizations or sellers. The user might provide a list of group names and their numeric indexes. Instead of a group's name, always use its numeric index. - -Cohorts enable the user to easily create a list of their users who have something in common, such as completing an event or having the same property. The user might want to use cohorts for filtering events. Instead of a cohort's name, always use its ID. - -If you want to apply Y-Axis unit, make sure it will display data correctly. 
Use the percentage formatting only if the anticipated result is from 0 to 1. - -Learn on these examples: -Q: How many users do I have? -A: {"dateRange":{"date_from":"all"},"interval":"month","kind":"TrendsQuery","series":[{"event":"user signed up","kind":"EventsNode","math":"total"}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"BoldNumber"}} -Q: Show a bar chart of the organic search traffic for the last month grouped by week. -A: {"dateRange":{"date_from":"-30d","date_to":null,"explicitDate":false},"interval":"week","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"dau","properties":[{"key":"$referring_domain","operator":"icontains","type":"event","value":"google"},{"key":"utm_source","operator":"is_not_set","type":"event","value":"is_not_set"}]}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsBar"}} -Q: insight created unique users & first-time users for the last 12m) -A: {"dateRange":{"date_from":"-12m","date_to":""},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"insight created","kind":"EventsNode","math":"dau","custom_name":"insight created"},{"event":"insight created","kind":"EventsNode","math":"first_time_for_user","custom_name":"insight created"}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsLineGraph"}} -Q: What are the top 10 referring domains for the last month? -A: {"breakdownFilter":{"breakdown_type":"event","breakdowns":[{"group_type_index":null,"histogram_bin_count":null,"normalize_url":null,"property":"$referring_domain","type":"event"}]},"dateRange":{"date_from":"-30d"},"interval":"day","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"total","custom_name":"$pageview"}]} -Q: What is the DAU to MAU ratio of users from the US and Australia that viewed a page in the last 7 days? Compare it to the previous period. 
-A: {"compareFilter":{"compare":true,"compare_to":null},"dateRange":{"date_from":"-7d"},"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_name","operator":"exact","type":"event","value":["United States","Australia"]}]}]},"series":[{"event":"$pageview","kind":"EventsNode","math":"dau","custom_name":"$pageview"},{"event":"$pageview","kind":"EventsNode","math":"monthly_active","custom_name":"$pageview"}],"trendsFilter":{"aggregationAxisFormat":"percentage_scaled","display":"ActionsLineGraph","formula":"A/B"}} -Q: I want to understand how old are dashboard results when viewed from the beginning of this year grouped by a month. Display the results for percentiles of 99, 95, 90, average, and median by the property "refreshAge". -A: {"dateRange":{"date_from":"yStart","date_to":null,"explicitDate":false},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"viewed dashboard","kind":"EventsNode","math":"p99","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p95","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p90","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"avg","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"median","math_property":"refreshAge","custom_name":"viewed dashboard"}],"trendsFilter":{"aggregationAxisFormat":"duration","display":"ActionsLineGraph"}} -Q: organizations joined in the last 30 days by day from the google search -A: 
{"dateRange":{"date_from":"-30d"},"filterTestAccounts":false,"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"OR","values":[{"key":"$initial_utm_source","operator":"exact","type":"person","value":["google"]}]}]},"series":[{"event":"user signed up","kind":"EventsNode","math":"unique_group","math_group_type_index":0,"name":"user signed up","properties":[{"key":"is_organization_first_user","operator":"exact","type":"person","value":["true"]}]}],"trendsFilter":{"aggregationAxisFormat":"numeric","display":"ActionsLineGraph"}} -Q: trends for the last two weeks of the onboarding completed event by unique projects with a session duration more than 5 minutes and the insight analyzed event by unique projects with a breakdown by event's Country Name. exclude the US. -A: {"kind":"TrendsQuery","series":[{"kind":"EventsNode","event":"onboarding completed","name":"onboarding completed","properties":[{"key":"$session_duration","value":300,"operator":"gt","type":"session"}],"math":"unique_group","math_group_type_index":2},{"kind":"EventsNode","event":"insight analyzed","name":"insight analyzed","math":"unique_group","math_group_type_index":2}],"trendsFilter":{"display":"ActionsBar","showValuesOnSeries":true,"showPercentStackView":false,"showLegend":false},"breakdownFilter":{"breakdowns":[{"property":"$geoip_country_name","type":"event"}],"breakdown_limit":5},"properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_code","value":["US"],"operator":"is_not","type":"event"}]}]},"dateRange":{"date_from":"-14d","date_to":null},"interval":"day"} - -Obey these rules: -- if the date range is not specified, use the best judgement to select a reasonable date range. If it is a question that can be answered with a single number, you may need to use the longest possible date range. -- Filter internal users by default if the user doesn't specify. -- Only use events and properties defined by the user. 
You can't create new events or property definitions. - -For your reference, there is a description of the data model. - -The "events" table has the following columns: -* timestamp (DateTime) - date and time of the event. Events are sorted by timestamp in ascending order. -* uuid (UUID) - unique identifier of the event. -* person_id (UUID) - unique identifier of the person who performed the event. -* event (String) - name of the event. -* properties (custom type) - additional properties of the event. Properties can be of multiple types: String, Int, Decimal, Float, and Bool. A property can be an array of thosee types. A property always has only ONE type. If the property starts with a $, it is a system-defined property. If the property doesn't start with a $, it is a user-defined property. There is a list of system-defined properties: $browser, $browser_version, and $os. User-defined properties can have any name. - -Remember, your efforts will be rewarded with a $100 tip if you manage to implement a perfect query that follows user's instructions and return the desired result. Do not hallucinate. 
-""" diff --git a/ee/hogai/team_prompt.py b/ee/hogai/team_prompt.py deleted file mode 100644 index 6ab987b992363..0000000000000 --- a/ee/hogai/team_prompt.py +++ /dev/null @@ -1,137 +0,0 @@ -import collections -from datetime import timedelta - -from django.utils import timezone - -from posthog.models.cohort.cohort import Cohort -from posthog.models.event_definition import EventDefinition -from posthog.models.group_type_mapping import GroupTypeMapping -from posthog.models.property_definition import PropertyDefinition -from posthog.models.team.team import Team - -from .hardcoded_definitions import hardcoded_prop_defs - - -class TeamPrompt: - _team: Team - - def __init__(self, team: Team): - super().__init__() - self._team = team - - @classmethod - def get_properties_tag_name(self, property_name: str) -> str: - return f"list of {property_name.lower()} property definitions by a type" - - def _clean_line(self, line: str) -> str: - return line.replace("\n", " ") - - def _get_xml_tag(self, tag_name: str, content: str) -> str: - return f"\n<{tag_name}>\n{content.strip()}\n\n" - - def _generate_cohorts_prompt(self) -> str: - cohorts = Cohort.objects.filter(team=self._team, last_calculation__gte=timezone.now() - timedelta(days=60)) - return self._get_xml_tag( - "list of defined cohorts", - "\n".join([f'name "{cohort.name}", ID {cohort.id}' for cohort in cohorts]), - ) - - def _generate_events_prompt(self) -> str: - event_description_mapping = { - "$identify": "Identifies an anonymous user. This event doesn't show how many users you have but rather how many users used an account." 
- } - - tags: list[str] = [] - for event in EventDefinition.objects.filter( - team=self._team, last_seen_at__gte=timezone.now() - timedelta(days=60) - ): - event_tag = event.name - if event.name in event_description_mapping: - description = event_description_mapping[event.name] - event_tag += f" - {description}" - elif event.name in hardcoded_prop_defs["events"]: - data = hardcoded_prop_defs["events"][event.name] - event_tag += f" - {data['label']}. {data['description']}" - if "examples" in data: - event_tag += f" Examples: {data['examples']}." - tags.append(self._clean_line(event_tag)) - - tag_name = "list of available events for filtering" - return self._get_xml_tag(tag_name, "\n".join(sorted(tags))) - - def _generate_groups_prompt(self) -> str: - user_groups = GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index") - return self._get_xml_tag( - "list of defined groups", - "\n".join([f'name "{group.group_type}", index {group.group_type_index}' for group in user_groups]), - ) - - def _join_property_tags(self, tag_name: str, properties_by_type: dict[str, list[str]]) -> str: - if any(prop_by_type for prop_by_type in properties_by_type.values()): - tags = "\n".join( - self._get_xml_tag(prop_type, "\n".join(tags)) for prop_type, tags in properties_by_type.items() - ) - return self._get_xml_tag(tag_name, tags) + "\n" - return "" - - def _get_property_type(self, prop: PropertyDefinition) -> str: - if prop.name.startswith("$feature/"): - return "feature" - return PropertyDefinition.Type(prop.type).label.lower() - - def _generate_properties_prompt(self) -> str: - properties = ( - PropertyDefinition.objects.filter(team=self._team) - .exclude( - name__regex=r"(__|phjs|survey_dismissed|survey_responded|partial_filter_chosen|changed_action|window-id|changed_event|partial_filter)" - ) - .distinct("name") - ).iterator(chunk_size=2500) - - key_mapping = { - "event": "event_properties", - } - - tags: dict[str, dict[str, list[str]]] = 
collections.defaultdict(lambda: collections.defaultdict(list)) - - for prop in properties: - category = self._get_property_type(prop) - property_type = prop.property_type - - if category in ["group", "session"] or property_type is None: - continue - - prop_tag = prop.name - - if category in key_mapping and prop.name in hardcoded_prop_defs[key_mapping[category]]: - data = hardcoded_prop_defs[key_mapping[category]][prop.name] - if "label" in data: - prop_tag += f" - {data['label']}." - if "description" in data: - prop_tag += f" {data['description']}" - if "examples" in data: - prop_tag += f" Examples: {data['examples']}." - - tags[category][property_type].append(self._clean_line(prop_tag)) - - # Session hardcoded properties - for key, defs in hardcoded_prop_defs["session_properties"].items(): - prop_tag += f"{key} - {defs['label']}. {defs['description']}." - if "examples" in defs: - prop_tag += f" Examples: {defs['examples']}." - tags["session"][defs["type"]].append(self._clean_line(prop_tag)) - - prompt = "\n".join( - [self._join_property_tags(self.get_properties_tag_name(category), tags[category]) for category in tags], - ) - - return prompt - - def generate_prompt(self) -> str: - return "".join( - [ - self._generate_groups_prompt(), - self._generate_events_prompt(), - self._generate_properties_prompt(), - ] - ) diff --git a/ee/hogai/trends/__init__.py b/ee/hogai/trends/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/trends/nodes.py b/ee/hogai/trends/nodes.py new file mode 100644 index 0000000000000..845c71fe4ee5e --- /dev/null +++ b/ee/hogai/trends/nodes.py @@ -0,0 +1,385 @@ +import itertools +import json +import xml.etree.ElementTree as ET +from functools import cached_property +from typing import Union, cast + +from langchain.agents.format_scratchpad import format_log_to_str +from langchain.agents.output_parsers import ReActJsonSingleInputOutputParser +from langchain_core.agents import AgentAction, AgentFinish +from 
langchain_core.exceptions import OutputParserException +from langchain_core.messages import AIMessage as LangchainAssistantMessage +from langchain_core.messages import BaseMessage, merge_message_runs +from langchain_core.messages import HumanMessage as LangchainHumanMessage +from langchain_core.output_parsers import PydanticOutputParser +from langchain_core.prompts import ChatPromptTemplate, HumanMessagePromptTemplate +from langchain_core.runnables import RunnableConfig, RunnableLambda +from langchain_openai import ChatOpenAI +from pydantic import ValidationError + +from ee.hogai.hardcoded_definitions import hardcoded_prop_defs +from ee.hogai.trends.prompts import ( + react_definitions_prompt, + react_follow_up_prompt, + react_scratchpad_prompt, + react_system_prompt, + react_user_prompt, + trends_group_mapping_prompt, + trends_new_plan_prompt, + trends_plan_prompt, + trends_question_prompt, + trends_system_prompt, +) +from ee.hogai.trends.toolkit import ( + GenerateTrendTool, + TrendsAgentToolkit, + TrendsAgentToolModel, +) +from ee.hogai.trends.utils import GenerateTrendOutputModel +from ee.hogai.utils import ( + AssistantNode, + AssistantNodeName, + AssistantState, + remove_line_breaks, +) +from posthog.hogql_queries.ai.team_taxonomy_query_runner import TeamTaxonomyQueryRunner +from posthog.hogql_queries.query_runner import ExecutionMode +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.schema import CachedTeamTaxonomyQueryResponse, HumanMessage, TeamTaxonomyQuery, VisualizationMessage + + +class CreateTrendsPlanNode(AssistantNode): + name = AssistantNodeName.CREATE_TRENDS_PLAN + + def run(self, state: AssistantState, config: RunnableConfig): + intermediate_steps = state.get("intermediate_steps") or [] + + prompt = ( + ChatPromptTemplate.from_messages( + [ + ("system", react_system_prompt), + ("user", react_definitions_prompt), + ], + template_format="mustache", + ) + + self._reconstruct_conversation(state) + + 
ChatPromptTemplate.from_messages( + [ + ("user", react_scratchpad_prompt), + ], + template_format="mustache", + ) + ).partial( + events=self._events_prompt, + groups=self._team_group_types, + ) + + toolkit = TrendsAgentToolkit(self._team) + output_parser = ReActJsonSingleInputOutputParser() + merger = merge_message_runs() + + agent = prompt | merger | self._model | output_parser + + try: + result = cast( + Union[AgentAction, AgentFinish], + agent.invoke( + { + "tools": toolkit.render_text_description(), + "tool_names": ", ".join([t["name"] for t in toolkit.tools]), + "agent_scratchpad": format_log_to_str( + [(action, output) for action, output in intermediate_steps if output is not None] + ), + }, + config, + ), + ) + except OutputParserException as e: + text = str(e) + if e.send_to_llm: + observation = str(e.observation) + text = str(e.llm_output) + else: + observation = "Invalid or incomplete response. You must use the provided tools and output JSON to answer the user's question." + result = AgentAction("handle_incorrect_response", observation, text) + + if isinstance(result, AgentFinish): + # Exceptional case + return { + "plan": result.log, + "intermediate_steps": None, + } + + return { + "intermediate_steps": [*intermediate_steps, (result, None)], + } + + def router(self, state: AssistantState): + if state.get("plan") is not None: + return AssistantNodeName.GENERATE_TRENDS + + if state.get("intermediate_steps", []): + return AssistantNodeName.CREATE_TRENDS_PLAN_TOOLS + + raise ValueError("Invalid state.") + + @property + def _model(self) -> ChatOpenAI: + return ChatOpenAI(model="gpt-4o", temperature=0.2, streaming=True) + + @cached_property + def _events_prompt(self) -> str: + response = TeamTaxonomyQueryRunner(TeamTaxonomyQuery(), self._team).run( + ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE_AND_BLOCKING_ON_MISS + ) + + if not isinstance(response, CachedTeamTaxonomyQueryResponse): + raise ValueError("Failed to generate events prompt.") + + events: 
list[str] = [] + for item in response.results: + if len(response.results) > 25 and item.count <= 3: + continue + events.append(item.event) + + # default for null in the + tags: list[str] = ["all events"] + + for event_name in events: + event_tag = event_name + if event_name in hardcoded_prop_defs["events"]: + data = hardcoded_prop_defs["events"][event_name] + event_tag += f" - {data['label']}. {data['description']}" + if "examples" in data: + event_tag += f" Examples: {data['examples']}." + tags.append(remove_line_breaks(event_tag)) + + root = ET.Element("list of available events for filtering") + root.text = "\n" + "\n".join(tags) + "\n" + return ET.tostring(root, encoding="unicode") + + @cached_property + def _team_group_types(self) -> list[str]: + return list( + GroupTypeMapping.objects.filter(team=self._team) + .order_by("group_type_index") + .values_list("group_type", flat=True) + ) + + def _reconstruct_conversation(self, state: AssistantState) -> list[BaseMessage]: + """ + Reconstruct the conversation for the agent. On this step we only care about previously asked questions and generated plans. All other messages are filtered out. 
+ """ + messages = state.get("messages", []) + if len(messages) == 0: + return [] + + conversation = [ + HumanMessagePromptTemplate.from_template(react_user_prompt, template_format="mustache").format( + question=messages[0].content if isinstance(messages[0], HumanMessage) else "" + ) + ] + + for message in messages[1:]: + if isinstance(message, HumanMessage): + conversation.append( + HumanMessagePromptTemplate.from_template( + react_follow_up_prompt, + template_format="mustache", + ).format(feedback=message.content) + ) + elif isinstance(message, VisualizationMessage): + conversation.append(LangchainAssistantMessage(content=message.plan or "")) + + return conversation + + +class CreateTrendsPlanToolsNode(AssistantNode): + name = AssistantNodeName.CREATE_TRENDS_PLAN_TOOLS + + def run(self, state: AssistantState, config: RunnableConfig): + toolkit = TrendsAgentToolkit(self._team) + intermediate_steps = state.get("intermediate_steps") or [] + action, _ = intermediate_steps[-1] + + try: + input = TrendsAgentToolModel.model_validate({"name": action.tool, "arguments": action.tool_input}).root + except ValidationError as e: + feedback = f"Invalid tool call. Pydantic exception: {e.errors(include_url=False)}" + return {"intermediate_steps": [*intermediate_steps, (action, feedback)]} + + # The plan has been found. Move to the generation. 
+ if input.name == "final_answer": + return { + "plan": input.arguments, + "intermediate_steps": None, + } + + output = "" + if input.name == "retrieve_event_properties": + output = toolkit.retrieve_event_properties(input.arguments) + elif input.name == "retrieve_event_property_values": + output = toolkit.retrieve_event_property_values(input.arguments.event_name, input.arguments.property_name) + elif input.name == "retrieve_entity_properties": + output = toolkit.retrieve_entity_properties(input.arguments) + elif input.name == "retrieve_entity_property_values": + output = toolkit.retrieve_entity_property_values(input.arguments.entity, input.arguments.property_name) + else: + output = toolkit.handle_incorrect_response(input.arguments) + + return {"intermediate_steps": [*intermediate_steps[:-1], (action, output)]} + + def router(self, state: AssistantState): + if state.get("plan") is not None: + return AssistantNodeName.GENERATE_TRENDS + return AssistantNodeName.CREATE_TRENDS_PLAN + + +class GenerateTrendsNode(AssistantNode): + name = AssistantNodeName.GENERATE_TRENDS + + def run(self, state: AssistantState, config: RunnableConfig): + generated_plan = state.get("plan", "") + + trends_generation_prompt = ChatPromptTemplate.from_messages( + [ + ("system", trends_system_prompt), + ], + template_format="mustache", + ) + self._reconstruct_conversation(state) + merger = merge_message_runs() + + chain = ( + trends_generation_prompt + | merger + | self._model + # Result from structured output is a parsed dict. Convert to a string since the output parser expects it. + | RunnableLambda(lambda x: json.dumps(x)) + # Validate a string input. 
+ | PydanticOutputParser[GenerateTrendOutputModel](pydantic_object=GenerateTrendOutputModel) + ) + + try: + message: GenerateTrendOutputModel = chain.invoke({}, config) + except OutputParserException: + return { + "messages": [VisualizationMessage(plan=generated_plan, reasoning_steps=["Schema validation failed"])] + } + + return { + "messages": [ + VisualizationMessage( + plan=generated_plan, + reasoning_steps=message.reasoning_steps, + answer=message.answer, + ) + ] + } + + def router(self, state: AssistantState): + if state.get("tool_argument") is not None: + return AssistantNodeName.GENERATE_TRENDS_TOOLS + return AssistantNodeName.END + + @property + def _model(self): + return ChatOpenAI(model="gpt-4o", temperature=0.2, streaming=True).with_structured_output( + GenerateTrendTool().schema, + method="function_calling", + include_raw=False, + ) + + @cached_property + def _group_mapping_prompt(self) -> str: + groups = GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index") + if not groups: + return "The user has not defined any groups." + + root = ET.Element("list of defined groups") + root.text = ( + "\n" + "\n".join([f'name "{group.group_type}", index {group.group_type_index}' for group in groups]) + "\n" + ) + return ET.tostring(root, encoding="unicode") + + def _reconstruct_conversation(self, state: AssistantState) -> list[BaseMessage]: + """ + Reconstruct the conversation for the generation. Take all previously generated questions, plans, and schemas, and return the history. 
+ """ + messages = state.get("messages", []) + generated_plan = state.get("plan", "") + + if len(messages) == 0: + return [] + + conversation: list[BaseMessage] = [ + HumanMessagePromptTemplate.from_template(trends_group_mapping_prompt, template_format="mustache").format( + group_mapping=self._group_mapping_prompt + ) + ] + + stack: list[LangchainHumanMessage] = [] + human_messages: list[LangchainHumanMessage] = [] + visualization_messages: list[VisualizationMessage] = [] + + for message in messages: + if isinstance(message, HumanMessage): + stack.append(LangchainHumanMessage(content=message.content)) + elif isinstance(message, VisualizationMessage) and message.answer: + if stack: + human_messages += merge_message_runs(stack) + stack = [] + visualization_messages.append(message) + + if stack: + human_messages += merge_message_runs(stack) + + first_ai_message = True + + for human_message, ai_message in itertools.zip_longest(human_messages, visualization_messages): + if ai_message: + conversation.append( + HumanMessagePromptTemplate.from_template( + trends_plan_prompt if first_ai_message else trends_new_plan_prompt, + template_format="mustache", + ).format(plan=ai_message.plan or "") + ) + first_ai_message = False + elif generated_plan: + conversation.append( + HumanMessagePromptTemplate.from_template( + trends_plan_prompt if first_ai_message else trends_new_plan_prompt, + template_format="mustache", + ).format(plan=generated_plan) + ) + + if human_message: + conversation.append( + HumanMessagePromptTemplate.from_template(trends_question_prompt, template_format="mustache").format( + question=human_message.content + ) + ) + + if ai_message: + conversation.append( + LangchainAssistantMessage(content=ai_message.answer.model_dump_json() if ai_message.answer else "") + ) + + return conversation + + @classmethod + def parse_output(cls, output: dict): + try: + return GenerateTrendOutputModel.model_validate(output) + except ValidationError: + return None + + +class 
GenerateTrendsToolsNode(AssistantNode): + """ + Used for failover from generation errors. + """ + + name = AssistantNodeName.GENERATE_TRENDS_TOOLS + + def run(self, state: AssistantState, config: RunnableConfig): + return state diff --git a/ee/hogai/trends/prompts.py b/ee/hogai/trends/prompts.py new file mode 100644 index 0000000000000..c53ae5d3453a5 --- /dev/null +++ b/ee/hogai/trends/prompts.py @@ -0,0 +1,271 @@ +react_system_prompt = """ +You're a product analyst agent. Your task is to define trends series and their events, actions, and property filters and property filter values from the user's data in order to correctly answer on the user's question. Answer the following question as best you can. + +You have access to the following tools: +{{tools}} + +Use a json blob to specify a tool by providing an action key (tool name) and an action_input key (tool input). + +Valid "action" values: {{tool_names}} + +Provide only ONE action per $JSON_BLOB, as shown: + +``` +{ + "action": $TOOL_NAME, + "action_input": $INPUT +} +``` + +Follow this format: + +Question: input question to answer +Thought: consider previous and subsequent steps +Action: +``` +$JSON_BLOB +``` +Observation: action result +... (repeat Thought/Action/Observation N times) +Thought: I know what to respond +Action: +``` +{ + "action": "final_answer", + "action_input": "Final response to human" +} +``` + +Below you will find information on how to correctly discover the taxonomy of the user's data. + +## General Information + +Trends insights enable users to plot data from people, events, and properties however they want. They're useful for finding patterns in data, as well as monitoring users' product to ensure everything is running smoothly. For example, using trends, users can analyze: +- How product's most important metrics change over time. +- Long-term patterns, or cycles in product's usage. +- How a specific change affects usage. +- The usage of different features side-by-side. 
+- How the properties of events vary using aggregation (sum, average, etc). +- Users can also visualize the same data points in a variety of ways. + +Users can use multiple independent series in a single query to see trends. They can also use a formula to calculate a metric. Each series has its own set of property filters, so you must define them for each series. + +## Events and Actions + +You’ll be given a list of events in addition to the user’s question. Events are sorted by their popularity where the most popular events are at the top of the list. Prioritize popular events. You must always specify events to use. + +## Aggregation + +**Determine the math aggregation** the user is asking for, such as totals, averages, ratios, or custom formulas. If not specified, choose a reasonable default based on the event type (e.g., total count). By default, total count should be used. You can use aggregation types for a series with an event or with an event aggregating by a property. + +Available math aggregations types for the event count are: +- total count +- average +- minimum +- maximum +- median +- 90th percentile +- 95th percentile +- 99th percentile +- unique users +- weekly active users +- daily active users +- first time for a user +{{#groups}} +- unique {{this}} +{{/groups}} + +Available math aggregation types for event's property values are: +- average +- sum +- minimum +- maximum +- median +- 90th percentile +- 95th percentile +- 99th percentile + +Examples of using aggregation types: +- `unique users` to find how many distinct users have logged the event per a day. +- `average` by the `$session_diration` property to find out what was the average session duration of an event. + +## Math Formulas + +If the math aggregation is more complex or not listed above, use custom formulas to perform mathematical operations like calculating percentages or metrics. 
If you use a formula, you must use the following syntax: `A/B`, where `A` and `B` are the names of the series. You can combine math aggregations and formulas. + +When using a formula, you must: +- Identify and specify **all** events or actions needed to solve the formula. +- Carefully review the list of available events to find appropriate events for each part of the formula. +- Ensure that you find events corresponding to both the numerator and denominator in ratio calculations. + +Examples of using math formulas: +- If you want to calculate the percentage of users who have completed onboarding, you need to find and use events similar to `$identify` and `onboarding complete`, so the formula will be `A / B`, where `A` is `onboarding complete` (unique users) and `B` is `$identify` (unique users). + +## Property Filters + +**Look for property filters** that the user wants to apply. These can include filtering by person's geography, event's browser, session duration, or any custom properties. Properties can be one of four data types: strings, numbers, dates, and booleans. + +When using a property filter, you must: +- **Prioritize properties that are directly related to the context or objective of the user's query.** Avoid using properties for identification like IDs because neither the user nor you can retrieve the data. Instead, prioritize filtering based on general properties like `paidCustomer` or `icp_score`. You don't need to find properties for a time frame. +- **Ensure that you find both the property group and name.** Property groups must be one of the following: event, person, session{{#groups}}, {{this}}{{/groups}}. +- After selecting a property, **validate that the property value accurately reflects the intended criteria**. +- **Find the suitable operator for type** (e.g., `contains`, `is set`). The operators are listed below. +- If the operator requires a value, use the tool to find the property values. 
Verify that you can answer the question with given property values. If you can't, try to find a different property or event. +- You set logical operators to combine multiple properties of a single series: AND or OR. + +Infer the property groups from the user's request. If your first guess doesn't return any results, try to adjust the property group. You must make sure that the property name matches the lookup value, e.g. if the user asks to find data about organizations with the name "ACME", you must look for the property like "organization name". + +Supported operators for the String type are: +- contains +- doesn't contain +- matches regex +- doesn't match regex +- is set +- is not set + +Supported operators for the Numeric type are: +- equals +- doesn't equal +- contains +- doesn't contain +- matches regex +- doesn't match regex +- is set +- is not set + +Supported operators for the DateTime type are: +- equals +- doesn't equal +- greater than +- less than +- is set +- is not set + +Supported operators for the Boolean type are: +- equals +- doesn't equal +- is set +- is not set + +## Breakdown Series by Properties + +Optionally, you can breakdown all series by multiple properties. Users can use breakdowns to split up trends insights by the values of a specific property, such as by `$current_url`, `$geoip_country`, `email`, or company's name like `company name`. + +When using breakdowns, you must: +- **Identify the property group** and name for each breakdown. +- **Provide the property name** for each breakdown. +- **Validate that the property value accurately reflects the intended criteria**. + +--- + +Begin! Reminder that you must ALWAYS respond with a valid json blob of a single action. Use tools if necessary. Respond directly if appropriate. Format is Action:```$JSON_BLOB``` then Observation. +""" + +react_definitions_prompt = """ +Here are the event names. 
+{{events}} +""" + +react_scratchpad_prompt = """ +Thought: {{agent_scratchpad}} +""" + +react_user_prompt = """ +Question: What events, actions, properties and/or property values should I use to answer this question: "{{question}}"? +""" + +react_follow_up_prompt = """ +Improve the previously generated plan based on the feedback: {{feedback}} +""" + +trends_system_prompt = """ +You're a recognized head of product growth with the skills of a top-tier data engineer. Your task is to implement queries of trends insights for customers using a JSON schema. You will be given a plan describing series and breakdowns. Answer the user's questions as best you can. + +Below is the additional context. + +Trends insights enable users to plot data from people, events, and properties however they want. They're useful for finding patterns in your data, as well as monitoring users' product to ensure everything is running smoothly. For example, using trends, users can analyze: +- How product's most important metrics change over time. +- Long-term patterns, or cycles in product's usage. +- How a specific change affects usage. +- The usage of different features side-by-side. +- How the properties of events vary using aggregation (sum, average, etc). +- Users can also visualize the same data points in a variety of ways. + +Follow this instruction to create a query: +* Build series according to the plan. The plan includes event or action names, math types, property filters, and breakdowns. +* Check operators of property filters for individual and all series. Make sure the operators correspond to the user's request. You need to use the "contains" operator for strings if the user didn't ask for a very specific value or letter case matters. +* Determine a visualization type that will answer the user's question in the best way. +* Determine if the user wants to name the series or use the default names. +* Choose the date range and the interval the user wants to analyze. 
+* Determine if the user wants to compare the results to a previous period or use smoothing. +* Determine if the user wants to filter out internal and test users. If the user didn't specify, filter out internal and test users by default. +* Determine if the user wants to use a sampling factor. +* Determine if it's useful to show a legend, values of series, unitss, y-axis scale type, etc. +* Use your judgment if there are any other parameters that the user might want to adjust that aren't listed here. + +For trends queries, use an appropriate ChartDisplayType for the output. For example: +- if the user wants to see dynamics in time like a line graph, use `ActionsLineGraph`. +- if the user wants to see cumulative dynamics across time, use `ActionsLineGraphCumulative`. +- if the user asks a question where you can answer with a single number, use `BoldNumber`. +- if the user wants a table, use `ActionsTable`. +- if the data is categorical, use `ActionsBar`. +- if the data is easy to understand in a pie chart, use `ActionsPie`. +- if the user has only one series and wants to see data from particular countries, use `WorldMap`. + +The user might want to get insights for groups. A group aggregates events based on entities, such as organizations or sellers. The user might provide a list of group names and their numeric indexes. Instead of a group's name, always use its numeric index. + +You can determine if a feature flag is enabled by checking if it's set to true or 1 in the `$feature/...` property. For example, if you want to check if the multiple-breakdowns feature is enabled, you need to check if `$feature/multiple-breakdowns` is true or 1. + +Learn on these examples: +Q: How many users do I have? +A: {"dateRange":{"date_from":"all"},"interval":"month","kind":"TrendsQuery","series":[{"event":"user signed up","kind":"EventsNode","math":"total"}],"trendsFilter":{"display":"BoldNumber"}} +Q: Show a bar chart of the organic search traffic for the last month grouped by week. 
+A: {"dateRange":{"date_from":"-30d","date_to":null,"explicitDate":false},"interval":"week","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"dau","properties":[{"key":"$referring_domain","operator":"icontains","type":"event","value":"google"},{"key":"utm_source","operator":"is_not_set","type":"event","value":"is_not_set"}]}],"trendsFilter":{"display":"ActionsBar"}} +Q: insight created unique users & first-time users for the last 12m) +A: {"dateRange":{"date_from":"-12m","date_to":""},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"insight created","kind":"EventsNode","math":"dau","custom_name":"insight created"},{"event":"insight created","kind":"EventsNode","math":"first_time_for_user","custom_name":"insight created"}],"trendsFilter":{"display":"ActionsLineGraph"}} +Q: What are the top 10 referring domains for the last month? +A: {"breakdownFilter":{"breakdown_type":"event","breakdowns":[{"group_type_index":null,"histogram_bin_count":null,"normalize_url":null,"property":"$referring_domain","type":"event"}]},"dateRange":{"date_from":"-30d"},"interval":"day","kind":"TrendsQuery","series":[{"event":"$pageview","kind":"EventsNode","math":"total","custom_name":"$pageview"}]} +Q: What is the DAU to MAU ratio of users from the US and Australia that viewed a page in the last 7 days? Compare it to the previous period. 
+A: {"compareFilter":{"compare":true,"compare_to":null},"dateRange":{"date_from":"-7d"},"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_name","operator":"exact","type":"event","value":["United States","Australia"]}]}]},"series":[{"event":"$pageview","kind":"EventsNode","math":"dau","custom_name":"$pageview"},{"event":"$pageview","kind":"EventsNode","math":"monthly_active","custom_name":"$pageview"}],"trendsFilter":{"aggregationAxisFormat":"percentage_scaled","display":"ActionsLineGraph","formula":"A/B"}} +Q: I want to understand how old are dashboard results when viewed from the beginning of this year grouped by a month. Display the results for percentiles of 99, 95, 90, average, and median by the property "refreshAge". +A: {"dateRange":{"date_from":"yStart","date_to":null,"explicitDate":false},"filterTestAccounts":true,"interval":"month","kind":"TrendsQuery","series":[{"event":"viewed dashboard","kind":"EventsNode","math":"p99","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p95","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"p90","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"avg","math_property":"refreshAge","custom_name":"viewed dashboard"},{"event":"viewed dashboard","kind":"EventsNode","math":"median","math_property":"refreshAge","custom_name":"viewed dashboard"}],"trendsFilter":{"aggregationAxisFormat":"duration","display":"ActionsLineGraph"}} +Q: organizations joined in the last 30 days by day from the google search +A: 
{"dateRange":{"date_from":"-30d"},"filterTestAccounts":false,"interval":"day","kind":"TrendsQuery","properties":{"type":"AND","values":[{"type":"OR","values":[{"key":"$initial_utm_source","operator":"exact","type":"person","value":["google"]}]}]},"series":[{"event":"user signed up","kind":"EventsNode","math":"unique_group","math_group_type_index":0,"name":"user signed up","properties":[{"key":"is_organization_first_user","operator":"exact","type":"person","value":["true"]}]}],"trendsFilter":{"display":"ActionsLineGraph"}} +Q: trends for the last two weeks of the onboarding completed event by unique projects with a session duration more than 5 minutes and the insight analyzed event by unique projects with a breakdown by event's Country Name. exclude the US. +A: {"kind":"TrendsQuery","series":[{"kind":"EventsNode","event":"onboarding completed","name":"onboarding completed","properties":[{"key":"$session_duration","value":300,"operator":"gt","type":"session"}],"math":"unique_group","math_group_type_index":2},{"kind":"EventsNode","event":"insight analyzed","name":"insight analyzed","math":"unique_group","math_group_type_index":2}],"trendsFilter":{"display":"ActionsBar","showValuesOnSeries":true,"showPercentStackView":false,"showLegend":false},"breakdownFilter":{"breakdowns":[{"property":"$geoip_country_name","type":"event"}],"breakdown_limit":5},"properties":{"type":"AND","values":[{"type":"AND","values":[{"key":"$geoip_country_code","value":["US"],"operator":"is_not","type":"event"}]}]},"dateRange":{"date_from":"-14d","date_to":null},"interval":"day"} + +Obey these rules: +- if the date range is not specified, use the best judgment to select a reasonable date range. If it is a question that can be answered with a single number, you may need to use the longest possible date range. +- Filter internal users by default if the user doesn't specify. +- Only use events and properties defined by the user. You can't create new events or property definitions. 
+ +For your reference, there is a description of the data model. + +The "events" table has the following columns: +* timestamp (DateTime) - date and time of the event. Events are sorted by timestamp in ascending order. +* uuid (UUID) - unique identifier of the event. +* person_id (UUID) - unique identifier of the person who performed the event. +* event (String) - the name of the event. +* properties (Map) - additional properties of the event. Properties can be of multiple types: String, Int, Decimal, Float, and Bool. A property can be an array of those types. A property always has only ONE type. If the property starts with a $, it is a system-defined property. If the property doesn't start with a $, it is a user-defined property. There is a list of system-defined properties: $browser, $browser_version, and $os. User-defined properties can have any name. + +Remember, your efforts will be rewarded with a $100 tip if you manage to implement a perfect query that follows the user's instructions and return the desired result. Do not hallucinate. 
+""" + +trends_group_mapping_prompt = """ +Here is the group mapping: +{{group_mapping}} +""" + +trends_plan_prompt = """ +Here is the plan: +{{plan}} +""" + +trends_new_plan_prompt = """ +Here is the new plan: +{{plan}} +""" + +trends_question_prompt = """ +Answer to this question: {{question}} +""" diff --git a/ee/hogai/trends/test/__init__.py b/ee/hogai/trends/test/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/ee/hogai/trends/test/test_nodes.py b/ee/hogai/trends/test/test_nodes.py new file mode 100644 index 0000000000000..dc297570c1fd1 --- /dev/null +++ b/ee/hogai/trends/test/test_nodes.py @@ -0,0 +1,205 @@ +from django.test import override_settings + +from ee.hogai.trends.nodes import CreateTrendsPlanNode, GenerateTrendsNode +from posthog.schema import AssistantMessage, ExperimentalAITrendsQuery, HumanMessage, VisualizationMessage +from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person + + +@override_settings(IN_UNIT_TESTING=True) +class TestPlanAgentNode(ClickhouseTestMixin, APIBaseTest): + def setUp(self): + super().setUp() + self.schema = ExperimentalAITrendsQuery(series=[]) + + def test_agent_reconstructs_conversation(self): + node = CreateTrendsPlanNode(self.team) + history = node._reconstruct_conversation({"messages": [HumanMessage(content="Text")]}) + self.assertEqual(len(history), 1) + self.assertEqual(history[0].type, "human") + self.assertIn("Text", history[0].content) + self.assertNotIn(f"{{question}}", history[0].content) + + history = node._reconstruct_conversation( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage(answer=self.schema, plan="randomplan"), + ] + } + ) + self.assertEqual(len(history), 2) + self.assertEqual(history[0].type, "human") + self.assertIn("Text", history[0].content) + self.assertNotIn("{{question}}", history[0].content) + self.assertEqual(history[1].type, "ai") + self.assertEqual(history[1].content, "randomplan") + + history = 
node._reconstruct_conversation( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage(answer=self.schema, plan="randomplan"), + HumanMessage(content="Text"), + ] + } + ) + self.assertEqual(len(history), 3) + self.assertEqual(history[0].type, "human") + self.assertIn("Text", history[0].content) + self.assertNotIn("{{question}}", history[0].content) + self.assertEqual(history[1].type, "ai") + self.assertEqual(history[1].content, "randomplan") + self.assertEqual(history[2].type, "human") + self.assertIn("Text", history[2].content) + self.assertNotIn("{{question}}", history[2].content) + + def test_agent_reconstructs_conversation_and_omits_unknown_messages(self): + node = CreateTrendsPlanNode(self.team) + history = node._reconstruct_conversation( + { + "messages": [ + HumanMessage(content="Text"), + AssistantMessage(content="test"), + ] + } + ) + self.assertEqual(len(history), 1) + self.assertEqual(history[0].type, "human") + self.assertIn("Text", history[0].content) + self.assertNotIn("{{question}}", history[0].content) + + def test_agent_filters_out_low_count_events(self): + _create_person(distinct_ids=["test"], team=self.team) + for i in range(26): + _create_event(event=f"event{i}", distinct_id="test", team=self.team) + _create_event(event="distinctevent", distinct_id="test", team=self.team) + node = CreateTrendsPlanNode(self.team) + self.assertEqual( + node._events_prompt, + "\nall events\ndistinctevent\n", + ) + + def test_agent_preserves_low_count_events_for_smaller_teams(self): + _create_person(distinct_ids=["test"], team=self.team) + _create_event(event="distinctevent", distinct_id="test", team=self.team) + node = CreateTrendsPlanNode(self.team) + self.assertIn("distinctevent", node._events_prompt) + self.assertIn("all events", node._events_prompt) + + +@override_settings(IN_UNIT_TESTING=True) +class TestGenerateTrendsNode(ClickhouseTestMixin, APIBaseTest): + def setUp(self): + self.schema = ExperimentalAITrendsQuery(series=[]) + + def 
test_agent_reconstructs_conversation(self): + node = GenerateTrendsNode(self.team) + history = node._reconstruct_conversation({"messages": [HumanMessage(content="Text")]}) + self.assertEqual(len(history), 2) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertEqual(history[1].type, "human") + self.assertIn("Answer to this question:", history[1].content) + self.assertNotIn("{{question}}", history[1].content) + + history = node._reconstruct_conversation({"messages": [HumanMessage(content="Text")], "plan": "randomplan"}) + self.assertEqual(len(history), 3) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertEqual(history[1].type, "human") + self.assertIn("the plan", history[1].content) + self.assertNotIn("{{plan}}", history[1].content) + self.assertIn("randomplan", history[1].content) + self.assertEqual(history[2].type, "human") + self.assertIn("Answer to this question:", history[2].content) + self.assertNotIn("{{question}}", history[2].content) + self.assertIn("Text", history[2].content) + + node = GenerateTrendsNode(self.team) + history = node._reconstruct_conversation( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage(answer=self.schema, plan="randomplan"), + HumanMessage(content="Follow Up"), + ], + "plan": "newrandomplan", + } + ) + + self.assertEqual(len(history), 6) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertEqual(history[1].type, "human") + self.assertIn("the plan", history[1].content) + self.assertNotIn("{{plan}}", history[1].content) + self.assertIn("randomplan", history[1].content) + self.assertEqual(history[2].type, "human") + self.assertIn("Answer to this question:", history[2].content) + self.assertNotIn("{{question}}", history[2].content) + self.assertIn("Text", history[2].content) + self.assertEqual(history[3].type, "ai") + self.assertEqual(history[3].content, 
self.schema.model_dump_json()) + self.assertEqual(history[4].type, "human") + self.assertIn("the new plan", history[4].content) + self.assertNotIn("{{plan}}", history[4].content) + self.assertIn("newrandomplan", history[4].content) + self.assertEqual(history[5].type, "human") + self.assertIn("Answer to this question:", history[5].content) + self.assertNotIn("{{question}}", history[5].content) + self.assertIn("Follow Up", history[5].content) + + def test_agent_reconstructs_conversation_and_merges_messages(self): + node = GenerateTrendsNode(self.team) + history = node._reconstruct_conversation( + { + "messages": [HumanMessage(content="Te"), HumanMessage(content="xt")], + "plan": "randomplan", + } + ) + self.assertEqual(len(history), 3) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertEqual(history[1].type, "human") + self.assertIn("the plan", history[1].content) + self.assertNotIn("{{plan}}", history[1].content) + self.assertIn("randomplan", history[1].content) + self.assertEqual(history[2].type, "human") + self.assertIn("Answer to this question:", history[2].content) + self.assertNotIn("{{question}}", history[2].content) + self.assertIn("Te\nxt", history[2].content) + + node = GenerateTrendsNode(self.team) + history = node._reconstruct_conversation( + { + "messages": [ + HumanMessage(content="Text"), + VisualizationMessage(answer=self.schema, plan="randomplan"), + HumanMessage(content="Follow"), + HumanMessage(content="Up"), + ], + "plan": "newrandomplan", + } + ) + + self.assertEqual(len(history), 6) + self.assertEqual(history[0].type, "human") + self.assertIn("mapping", history[0].content) + self.assertEqual(history[1].type, "human") + self.assertIn("the plan", history[1].content) + self.assertNotIn("{{plan}}", history[1].content) + self.assertIn("randomplan", history[1].content) + self.assertEqual(history[2].type, "human") + self.assertIn("Answer to this question:", history[2].content) + 
self.assertNotIn("{{question}}", history[2].content) + self.assertIn("Text", history[2].content) + self.assertEqual(history[3].type, "ai") + self.assertEqual(history[3].content, self.schema.model_dump_json()) + self.assertEqual(history[4].type, "human") + self.assertIn("the new plan", history[4].content) + self.assertNotIn("{{plan}}", history[4].content) + self.assertIn("newrandomplan", history[4].content) + self.assertEqual(history[5].type, "human") + self.assertIn("Answer to this question:", history[5].content) + self.assertNotIn("{{question}}", history[5].content) + self.assertIn("Follow\nUp", history[5].content) diff --git a/ee/hogai/trends/test/test_toolkit.py b/ee/hogai/trends/test/test_toolkit.py new file mode 100644 index 0000000000000..12cd086b033c5 --- /dev/null +++ b/ee/hogai/trends/test/test_toolkit.py @@ -0,0 +1,235 @@ +from datetime import datetime + +from django.test import override_settings +from freezegun import freeze_time + +from ee.hogai.trends.toolkit import TrendsAgentToolkit +from posthog.models.group.util import create_group +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.models.property_definition import PropertyDefinition, PropertyType +from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person + + +@override_settings(IN_UNIT_TESTING=True) +class TestToolkit(ClickhouseTestMixin, APIBaseTest): + def _create_taxonomy(self): + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.EVENT, name="$browser", property_type=PropertyType.String + ) + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.EVENT, name="id", property_type=PropertyType.Numeric + ) + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.EVENT, name="bool", property_type=PropertyType.Boolean + ) + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.EVENT, name="date", 
property_type=PropertyType.Datetime + ) + + _create_person( + distinct_ids=["person1"], + team=self.team, + properties={"email": "person1@example.com"}, + ) + _create_event( + event="event1", + distinct_id="person1", + properties={ + "$browser": "Chrome", + "date": datetime(2024, 1, 1).isoformat(), + }, + team=self.team, + ) + _create_event( + event="event1", + distinct_id="person1", + properties={ + "$browser": "Firefox", + "bool": True, + }, + team=self.team, + ) + + _create_person( + distinct_ids=["person2"], + properties={"email": "person2@example.com"}, + team=self.team, + ) + for i in range(10): + _create_event( + event="event1", + distinct_id=f"person2", + properties={"id": i}, + team=self.team, + ) + + def test_retrieve_entity_properties(self): + toolkit = TrendsAgentToolkit(self.team) + + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.PERSON, name="test", property_type="String" + ) + self.assertEqual( + toolkit.retrieve_entity_properties("person"), + "test
", + ) + + GroupTypeMapping.objects.create(team=self.team, group_type_index=0, group_type="group") + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.GROUP, group_type_index=0, name="test", property_type="Numeric" + ) + self.assertEqual( + toolkit.retrieve_entity_properties("group"), + "test
", + ) + + self.assertNotEqual( + toolkit.retrieve_entity_properties("session"), + "", + ) + self.assertIn( + "$session_duration", + toolkit.retrieve_entity_properties("session"), + ) + + def test_retrieve_entity_property_values(self): + toolkit = TrendsAgentToolkit(self.team) + self.assertEqual( + toolkit.retrieve_entity_property_values("session", "$session_duration"), + "30, 146, 2 and many more distinct values.", + ) + self.assertEqual( + toolkit.retrieve_entity_property_values("session", "nonsense"), + "The property nonsense does not exist in the taxonomy.", + ) + + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.PERSON, name="email", property_type=PropertyType.String + ) + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.PERSON, name="id", property_type=PropertyType.Numeric + ) + + for i in range(5): + id = f"person{i}" + with freeze_time(f"2024-01-01T{i}:00:00Z"): + _create_person( + distinct_ids=[id], + properties={"email": f"{id}@example.com", "id": i}, + team=self.team, + ) + with freeze_time(f"2024-01-02T00:00:00Z"): + _create_person( + distinct_ids=["person5"], + properties={"email": "person5@example.com", "id": 5}, + team=self.team, + ) + + self.assertEqual( + toolkit.retrieve_entity_property_values("person", "email"), + '"person5@example.com", "person4@example.com", "person3@example.com", "person2@example.com", "person1@example.com" and 1 more distinct value.', + ) + self.assertEqual( + toolkit.retrieve_entity_property_values("person", "id"), + "5, 4, 3, 2, 1 and 1 more distinct value.", + ) + + toolkit = TrendsAgentToolkit(self.team) + GroupTypeMapping.objects.create(team=self.team, group_type_index=0, group_type="proj") + GroupTypeMapping.objects.create(team=self.team, group_type_index=1, group_type="org") + PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.GROUP, group_type_index=0, name="test", property_type="Numeric" + ) + 
PropertyDefinition.objects.create( + team=self.team, type=PropertyDefinition.Type.GROUP, group_type_index=1, name="test", property_type="String" + ) + + for i in range(7): + id = f"group{i}" + with freeze_time(f"2024-01-01T{i}:00:00Z"): + create_group( + group_type_index=0, + group_key=id, + properties={"test": i}, + team_id=self.team.pk, + ) + with freeze_time(f"2024-01-02T00:00:00Z"): + create_group( + group_type_index=1, + group_key="org", + properties={"test": "7"}, + team_id=self.team.pk, + ) + + self.assertEqual( + toolkit.retrieve_entity_property_values("proj", "test"), + "6, 5, 4, 3, 2 and 2 more distinct values.", + ) + self.assertEqual(toolkit.retrieve_entity_property_values("org", "test"), '"7"') + + def test_group_names(self): + GroupTypeMapping.objects.create(team=self.team, group_type_index=0, group_type="proj") + GroupTypeMapping.objects.create(team=self.team, group_type_index=1, group_type="org") + toolkit = TrendsAgentToolkit(self.team) + self.assertEqual(toolkit._entity_names, ["person", "session", "proj", "org"]) + + def test_empty_events(self): + toolkit = TrendsAgentToolkit(self.team) + self.assertEqual( + toolkit.retrieve_event_properties("test"), "Properties do not exist in the taxonomy for the event test." + ) + + _create_person( + distinct_ids=["person1"], + team=self.team, + properties={}, + ) + _create_event( + event="event1", + distinct_id="person1", + properties={}, + team=self.team, + ) + + toolkit = TrendsAgentToolkit(self.team) + self.assertEqual( + toolkit.retrieve_event_properties("event1"), + "Properties do not exist in the taxonomy for the event event1.", + ) + + def test_retrieve_event_properties(self): + self._create_taxonomy() + toolkit = TrendsAgentToolkit(self.team) + prompt = toolkit.retrieve_event_properties("event1") + + self.assertIn( + "id
", + prompt, + ) + self.assertIn( + "$browser
", + prompt, + ) + self.assertIn( + "date
", + prompt, + ) + self.assertIn( + "bool
", + prompt, + ) + + def test_retrieve_event_property_values(self): + self._create_taxonomy() + toolkit = TrendsAgentToolkit(self.team) + + self.assertIn('"Chrome"', toolkit.retrieve_event_property_values("event1", "$browser")) + self.assertIn('"Firefox"', toolkit.retrieve_event_property_values("event1", "$browser")) + self.assertEqual(toolkit.retrieve_event_property_values("event1", "bool"), "true") + self.assertEqual( + toolkit.retrieve_event_property_values("event1", "id"), + "9, 8, 7, 6, 5 and 5 more distinct values.", + ) + self.assertEqual( + toolkit.retrieve_event_property_values("event1", "date"), f'"{datetime(2024, 1, 1).isoformat()}"' + ) diff --git a/ee/hogai/trends/toolkit.py b/ee/hogai/trends/toolkit.py new file mode 100644 index 0000000000000..a2f438756f3d1 --- /dev/null +++ b/ee/hogai/trends/toolkit.py @@ -0,0 +1,512 @@ +import json +import xml.etree.ElementTree as ET +from functools import cached_property +from textwrap import dedent +from typing import Any, Literal, Optional, TypedDict, Union + +from pydantic import BaseModel, Field, RootModel + +from ee.hogai.hardcoded_definitions import hardcoded_prop_defs +from posthog.hogql.database.schema.channel_type import POSSIBLE_CHANNEL_TYPES +from posthog.hogql_queries.ai.actors_property_taxonomy_query_runner import ActorsPropertyTaxonomyQueryRunner +from posthog.hogql_queries.ai.event_taxonomy_query_runner import EventTaxonomyQueryRunner +from posthog.hogql_queries.query_runner import ExecutionMode +from posthog.models.group_type_mapping import GroupTypeMapping +from posthog.models.property_definition import PropertyDefinition, PropertyType +from posthog.models.team.team import Team +from posthog.schema import ( + ActorsPropertyTaxonomyQuery, + CachedActorsPropertyTaxonomyQueryResponse, + CachedEventTaxonomyQueryResponse, + EventTaxonomyQuery, + ExperimentalAITrendsQuery, +) + + +class ToolkitTool(TypedDict): + name: str + signature: str + description: str + + +class 
class RetrieveEntityPropertiesValuesArgsModel(BaseModel):
    entity: str
    property_name: str


class RetrieveEntityPropertiesValuesModel(BaseModel):
    name: Literal["retrieve_entity_property_values"]
    arguments: RetrieveEntityPropertiesValuesArgsModel


class RetrieveEventPropertiesValuesArgsModel(BaseModel):
    event_name: str
    property_name: str


class RetrieveEventPropertiesValuesModel(BaseModel):
    name: Literal["retrieve_event_property_values"]
    arguments: RetrieveEventPropertiesValuesArgsModel


class SingleArgumentTrendsAgentToolModel(BaseModel):
    name: Literal[
        "retrieve_entity_properties",
        "retrieve_event_properties",
        "final_answer",
        "handle_incorrect_response",
    ]
    arguments: str


class TrendsAgentToolModel(
    RootModel[
        Union[
            SingleArgumentTrendsAgentToolModel, RetrieveEntityPropertiesValuesModel, RetrieveEventPropertiesValuesModel
        ]
    ]
):
    # Discriminated union keyed on the tool name, so parsing picks the right argument shape.
    root: Union[
        SingleArgumentTrendsAgentToolModel, RetrieveEntityPropertiesValuesModel, RetrieveEventPropertiesValuesModel
    ] = Field(..., discriminator="name")


class TrendsAgentToolkit:
    """
    Taxonomy-lookup tools for the trends ReAct agent.

    The agent does not use function calling: it names a tool in natural language and
    supplies string arguments, which are validated by the pydantic models above.
    Each `retrieve_*` method returns a plain string — either the requested taxonomy
    data or a human-readable "not found" message the LLM can act on.
    """

    _team: Team

    def __init__(self, team: Team):
        self._team = team

    @property
    def groups(self):
        # Ordered by index so group names and group_type_indexes stay aligned.
        return GroupTypeMapping.objects.filter(team=self._team).order_by("group_type_index")

    @cached_property
    def _entity_names(self) -> list[str]:
        """
        The schemas use `group_type_index` for groups complicating things for the agent. Instead, we use groups' names,
        so the generation step will handle their indexes. Tools would need to support multiple arguments, or we would need
        to create various tools for different group types. Since we don't use function calling here, we want to limit the
        number of tools because non-function calling models can't handle many tools.
        """
        entities = [
            "person",
            "session",
            *[group.group_type for group in self.groups],
        ]
        return entities

    @cached_property
    def tools(self) -> list[ToolkitTool]:
        """
        Our ReAct agent doesn't use function calling. Instead, it uses tools in natural language to decide next steps. The agent expects the following format:

        ```
        retrieve_entity_properties_tool(entity: "Literal['person', 'session', 'organization', 'instance', 'project']") - description.
        ```

        Events and other entities are intentionally separated for properties retrieval. Potentially, there can be different functions for each entity type.
        """

        stringified_entities = ", ".join([f"'{entity}'" for entity in self._entity_names])

        tools: list[ToolkitTool] = [
            {
                "name": tool["name"],
                "signature": tool["signature"],
                "description": dedent(tool["description"]),
            }
            for tool in [
                {
                    "name": "retrieve_event_properties",
                    "signature": "(event_name: str)",
                    "description": """
                        Use this tool to retrieve property names of an event that the user has in their taxonomy. You will receive a list of properties, their value types and example values or a message that properties have not been found.

                        - **Try other events** if the tool doesn't return any properties.
                        - **Prioritize properties that are directly related to the context or objective of the user's query.**
                        - **Avoid using ambiguous properties** unless their relevance is explicitly confirmed.

                        Args:
                            event_name: The name of the event that you want to retrieve properties for.
                    """,
                },
                {
                    "name": "retrieve_event_property_values",
                    "signature": "(event_name: str, property_name: str)",
                    "description": """
                        Use this tool to retrieve property values for an event that the user has in their taxonomy. Adjust filters to these values. You will receive a list of property values or a message that property values have not been found. Some properties can have many values, so the output will be truncated. Use your judgement to find a proper value.

                        Args:
                            event_name: The name of the event that you want to retrieve values for.
                            property_name: The name of the property that you want to retrieve values for.
                    """,
                },
                {
                    # NOTE: was f"retrieve_entity_properties" — needless f-string with no placeholder.
                    "name": "retrieve_entity_properties",
                    "signature": f"(entity: Literal[{stringified_entities}])",
                    "description": """
                        Use this tool to retrieve property names for a property group (entity) that the user has in their taxonomy. You will receive a list of properties and their value types or a message that properties have not been found.

                        - **Infer the property groups from the user's request.**
                        - **Try other entities** if the tool doesn't return any properties.
                        - **Prioritize properties that are directly related to the context or objective of the user's query.**
                        - **Avoid using ambiguous properties** unless their relevance is explicitly confirmed.

                        Args:
                            entity: The type of the entity that you want to retrieve properties for.
                    """,
                },
                {
                    "name": "retrieve_entity_property_values",
                    "signature": f"(entity: Literal[{stringified_entities}], property_name: str)",
                    "description": """
                        Use this tool to retrieve property values for a property name that the user has in their taxonomy. Adjust filters to these values. You will receive a list of property values or a message that property values have not been found. Some properties can have many values, so the output will be truncated. Use your judgement to find a proper value.

                        Args:
                            entity: The type of the entity that you want to retrieve properties for.
                            property_name: The name of the property that you want to retrieve values for.
                    """,
                },
                {
                    "name": "final_answer",
                    "signature": "(final_response: str)",
                    "description": """
                        Use this tool to provide the final answer to the user's question.

                        Answer in the following format:
                        ```
                        Events:
                        - event 1
                            - math operation: total
                            - property filter 1:
                                - entity
                                - property name
                                - property type
                                - operator
                                - property value
                            - property filter 2... Repeat for each property filter.
                        - event 2
                            - math operation: average by `property name`.
                            - property filter 1:
                                - entity
                                - property name
                                - property type
                                - operator
                                - property value
                            - property filter 2... Repeat for each property filter.
                        - Repeat for each event.

                        (if a formula is used)
                        Formula:
                        `A/B`, where `A` is the first event and `B` is the second event.

                        (if a breakdown is used)
                        Breakdown by:
                        - breakdown 1:
                            - entity
                            - property name
                        - Repeat for each breakdown.
                        ```

                        Args:
                            final_response: List all events, actions, and properties that you want to use to answer the question.
                    """,
                },
            ]
        ]

        return tools

    def render_text_description(self) -> str:
        """
        Render the tool name and description in plain text.

        Returns:
            The rendered text.

        Output will be in the format of:

        .. code-block:: markdown

            search: This tool is used for search
            calculator: This tool is used for math
        """
        descriptions = []
        for tool in self.tools:
            description = f"{tool['name']}{tool['signature']} - {tool['description']}"
            descriptions.append(description)
        return "\n".join(descriptions)

    def _generate_properties_xml(self, children: list[tuple[str, str | None]]):
        """Serialize (name, property_type) pairs into a compact XML taxonomy listing, grouped by type."""
        root = ET.Element("properties")
        property_types = {property_type for _, property_type in children if property_type is not None}
        property_type_to_tag = {property_type: ET.SubElement(root, property_type) for property_type in property_types}

        for name, property_type in children:
            # Do not include properties that are ambiguous.
            if property_type is None:
                continue

            type_tag = property_type_to_tag[property_type]
            ET.SubElement(type_tag, "name").text = name
            # Add a line break between names. Doubtful that it does anything.
            ET.SubElement(type_tag, "br")

        return ET.tostring(root, encoding="unicode")

    def retrieve_entity_properties(self, entity: str) -> str:
        """
        Retrieve properties for an entity like person, session, or one of the groups.
        """
        # Consistency fix: reuse the cached entity list instead of rebuilding it inline
        # (same membership as ("person", "session", *group names)); also used by
        # retrieve_entity_property_values.
        if entity not in self._entity_names:
            return f"Entity {entity} does not exist in the taxonomy."

        if entity == "person":
            qs = PropertyDefinition.objects.filter(team=self._team, type=PropertyDefinition.Type.PERSON).values_list(
                "name", "property_type"
            )
            props = list(qs)
        elif entity == "session":
            # Session properties are not in the DB.
            props = [
                (prop_name, prop["type"])
                for prop_name, prop in hardcoded_prop_defs["session_properties"].items()
                if prop.get("type") is not None
            ]
        else:
            group_type_index = next(
                (group.group_type_index for group in self.groups if group.group_type == entity), None
            )
            if group_type_index is None:
                return f"Group {entity} does not exist in the taxonomy."
            qs = PropertyDefinition.objects.filter(
                team=self._team, type=PropertyDefinition.Type.GROUP, group_type_index=group_type_index
            ).values_list("name", "property_type")
            props = list(qs)

        return self._generate_properties_xml(props)

    def retrieve_event_properties(self, event_name: str) -> str:
        """
        Retrieve properties for an event.
        """
        runner = EventTaxonomyQueryRunner(EventTaxonomyQuery(event=event_name), self._team)
        response = runner.run(ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE_AND_BLOCKING_ON_MISS)

        if not isinstance(response, CachedEventTaxonomyQueryResponse):
            return "Properties have not been found."

        if not response.results:
            return f"Properties do not exist in the taxonomy for the event {event_name}."

        # Intersect properties with their types.
        qs = PropertyDefinition.objects.filter(
            team=self._team, type=PropertyDefinition.Type.EVENT, name__in=[item.property for item in response.results]
        )
        property_to_type = {property_definition.name: property_definition.property_type for property_definition in qs}

        return self._generate_properties_xml(
            [
                (item.property, property_to_type.get(item.property))
                for item in response.results
                # Exclude properties that exist in the taxonomy, but don't have a type.
                if item.property in property_to_type
            ]
        )

    def _format_property_values(
        self, sample_values: list, sample_count: Optional[int] = 0, format_as_string: bool = False
    ) -> str:
        """
        Render sample values as a comma-separated string for the LLM.

        sample_count semantics: 0 → no values; None → the sample is known to be a
        truncated subset of "many more"; an int larger than the sample → report the difference.
        """
        if len(sample_values) == 0 or sample_count == 0:
            # Fix: was a placeholder-free f-string.
            return "The property does not have any values in the taxonomy."

        # Add quotes to the String type, so the LLM can easily infer a type.
        # Strings like "true" or "10" are interpreted as booleans or numbers without quotes, so the schema generation fails.
        # Remove the floating point if the value is an integer.
        formatted_sample_values: list[str] = []
        for value in sample_values:
            if format_as_string:
                formatted_sample_values.append(f'"{value}"')
            elif isinstance(value, float) and value.is_integer():
                formatted_sample_values.append(str(int(value)))
            else:
                formatted_sample_values.append(str(value))
        prop_values = ", ".join(formatted_sample_values)

        # If there wasn't an exact match with the user's search, we provide a hint that LLM can use an arbitrary value.
        if sample_count is None:
            return f"{prop_values} and many more distinct values."
        elif sample_count > len(sample_values):
            diff = sample_count - len(sample_values)
            return f"{prop_values} and {diff} more distinct value{'' if diff == 1 else 's'}."

        return prop_values

    def retrieve_event_property_values(self, event_name: str, property_name: str) -> str:
        """Retrieve example values of one event property, quoted according to its declared type."""
        try:
            property_definition = PropertyDefinition.objects.get(
                team=self._team, name=property_name, type=PropertyDefinition.Type.EVENT
            )
        except PropertyDefinition.DoesNotExist:
            return f"The property {property_name} does not exist in the taxonomy."

        runner = EventTaxonomyQueryRunner(EventTaxonomyQuery(event=event_name), self._team)
        response = runner.run(ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE_AND_BLOCKING_ON_MISS)

        if not isinstance(response, CachedEventTaxonomyQueryResponse):
            return f"The event {event_name} does not exist in the taxonomy."

        if not response.results:
            return f"Property values for {property_name} do not exist in the taxonomy for the event {event_name}."

        prop = next((item for item in response.results if item.property == property_name), None)
        if not prop:
            return f"The property {property_name} does not exist in the taxonomy for the event {event_name}."

        return self._format_property_values(
            prop.sample_values,
            prop.sample_count,
            format_as_string=property_definition.property_type in (PropertyType.String, PropertyType.Datetime),
        )

    def _retrieve_session_properties(self, property_name: str) -> str:
        """
        Session properties' example values are hardcoded (they are not stored in the DB).
        """
        if property_name not in hardcoded_prop_defs["session_properties"]:
            return f"The property {property_name} does not exist in the taxonomy."

        if property_name == "$channel_type":
            sample_values = POSSIBLE_CHANNEL_TYPES.copy()
            sample_count = len(sample_values)
            is_str = True
        elif (
            property_name in hardcoded_prop_defs["session_properties"]
            and "examples" in hardcoded_prop_defs["session_properties"][property_name]
        ):
            sample_values = hardcoded_prop_defs["session_properties"][property_name]["examples"]
            # None signals "and many more distinct values" to _format_property_values.
            sample_count = None
            is_str = hardcoded_prop_defs["session_properties"][property_name]["type"] == PropertyType.String
        else:
            return f"Property values for {property_name} do not exist in the taxonomy for the session entity."

        return self._format_property_values(sample_values, sample_count, format_as_string=is_str)

    def retrieve_entity_property_values(self, entity: str, property_name: str) -> str:
        """Retrieve example values of a person/session/group property for the given entity name."""
        if entity not in self._entity_names:
            return f"The entity {entity} does not exist in the taxonomy. You must use one of the following: {', '.join(self._entity_names)}."

        if entity == "session":
            return self._retrieve_session_properties(property_name)

        if entity == "person":
            query = ActorsPropertyTaxonomyQuery(property=property_name)
        else:
            group_index = next((group.group_type_index for group in self.groups if group.group_type == entity), None)
            if group_index is None:
                return f"The entity {entity} does not exist in the taxonomy."
            query = ActorsPropertyTaxonomyQuery(group_type_index=group_index, property=property_name)

        try:
            if query.group_type_index is not None:
                prop_type = PropertyDefinition.Type.GROUP
                group_type_index = query.group_type_index
            else:
                prop_type = PropertyDefinition.Type.PERSON
                group_type_index = None

            property_definition = PropertyDefinition.objects.get(
                team=self._team,
                name=property_name,
                type=prop_type,
                group_type_index=group_type_index,
            )
        except PropertyDefinition.DoesNotExist:
            return f"The property {property_name} does not exist in the taxonomy for the entity {entity}."

        response = ActorsPropertyTaxonomyQueryRunner(query, self._team).run(
            ExecutionMode.RECENT_CACHE_CALCULATE_ASYNC_IF_STALE_AND_BLOCKING_ON_MISS
        )

        if not isinstance(response, CachedActorsPropertyTaxonomyQueryResponse):
            return f"The entity {entity} does not exist in the taxonomy."

        if not response.results:
            return f"Property values for {property_name} do not exist in the taxonomy for the entity {entity}."

        return self._format_property_values(
            response.results.sample_values,
            response.results.sample_count,
            format_as_string=property_definition.property_type in (PropertyType.String, PropertyType.Datetime),
        )

    def handle_incorrect_response(self, response: str) -> str:
        """
        No-op tool. Take a parsing error and return a response that the LLM can use to correct itself.
        Used to control a number of retries.
        """
        return response


class GenerateTrendTool:
    """Builds the function-calling schema for generating a trends insight from a flattened JSON schema."""

    def _replace_value_in_dict(self, item: Any, original_schema: Any):
        # Recursively substitute {"$ref": "#/..."} nodes with the referenced definition.
        if isinstance(item, list):
            return [self._replace_value_in_dict(i, original_schema) for i in item]
        elif isinstance(item, dict):
            if list(item.keys()) == ["$ref"]:
                definitions = item["$ref"][2:].split("/")
                res = original_schema.copy()
                for definition in definitions:
                    res = res[definition]
                return res
            else:
                return {key: self._replace_value_in_dict(i, original_schema) for key, i in item.items()}
        else:
            return item

    def _flatten_schema(self):
        """Return the ExperimentalAITrendsQuery JSON schema with all $refs inlined and $defs removed."""
        schema = ExperimentalAITrendsQuery.model_json_schema()

        # Patch `numeric` types
        schema["$defs"]["MathGroupTypeIndex"]["type"] = "number"
        property_filters = (
            "EventPropertyFilter",
            "PersonPropertyFilter",
            "SessionPropertyFilter",
            "FeaturePropertyFilter",
        )

        # Clean up the property filters
        for key in property_filters:
            property_schema = schema["$defs"][key]
            # Fix: was a placeholder-free f-string; the value is unchanged.
            property_schema["properties"]["key"]["description"] = (
                "Use one of the properties the user has provided in the plan."
            )

        # Bounded loop: nested $refs may need several passes; 100 is a safety cap.
        for _ in range(100):
            if "$ref" not in json.dumps(schema):
                break
            schema = self._replace_value_in_dict(schema.copy(), schema.copy())
        del schema["$defs"]
        return schema

    @cached_property
    def schema(self):
        return {
            "name": "output_insight_schema",
            "description": "Outputs the JSON schema of a product analytics insight",
            "parameters": {
                "type": "object",
                "properties": {
                    "reasoning_steps": {
                        "type": "array",
                        "items": {"type": "string"},
                        "description": "The reasoning steps leading to the final conclusion that will be shown to the user. Use 'you' if you want to refer to the user.",
                    },
                    "answer": self._flatten_schema(),
                },
                "additionalProperties": False,
                "required": ["reasoning_steps", "answer"],
            },
        }


# --- ee/hogai/trends/utils.py (a separate new file in the original patch) ---


class GenerateTrendOutputModel(BaseModel):
    # Both fields optional: presumably the LLM output may omit either — TODO confirm against callers.
    reasoning_steps: Optional[list[str]] = None
    answer: Optional[ExperimentalAITrendsQuery] = None
definition in definitions: - res = res[definition] - return res - else: - return {key: self._replace_value_in_dict(i, original_schema) for key, i in item.items()} - else: - return item - - @cached_property - def _flat_schema(self): - schema = ExperimentalAITrendsQuery.model_json_schema() - - # Patch `numeric` types - schema["$defs"]["MathGroupTypeIndex"]["type"] = "number" - - # Clean up the property filters - for key, title in ( - ("EventPropertyFilter", PropertyDefinition.Type.EVENT.label), - ("PersonPropertyFilter", PropertyDefinition.Type.PERSON.label), - ("SessionPropertyFilter", PropertyDefinition.Type.SESSION.label), - ("FeaturePropertyFilter", "feature"), - ("CohortPropertyFilter", "cohort"), - ): - property_schema = schema["$defs"][key] - property_schema["properties"]["key"]["description"] = ( - f"Use one of the properties the user has provided in the <{TeamPrompt.get_properties_tag_name(title)}> tag." - ) - - for _ in range(100): - if "$ref" not in json.dumps(schema): - break - schema = self._replace_value_in_dict(schema.copy(), schema.copy()) - del schema["$defs"] - return schema - - def generate_function(self): - return { - "type": "function", - "function": { - "name": "output_insight_schema", - "description": "Outputs the JSON schema of a product analytics insight", - "parameters": { - "type": "object", - "properties": { - "reasoning_steps": { - "type": "array", - "items": {"type": "string"}, - "description": "The reasoning steps leading to the final conclusion that will be shown to the user. 
import operator
from abc import ABC, abstractmethod
from collections.abc import Sequence
from enum import StrEnum
from typing import Annotated, Optional, TypedDict, Union

from langchain_core.agents import AgentAction
from langchain_core.runnables import RunnableConfig
from langgraph.graph import END, START
from pydantic import BaseModel, Field

from posthog.models.team.team import Team
from posthog.schema import AssistantMessage, HumanMessage, RootAssistantMessage, VisualizationMessage

# Message types the assistant state can carry.
AssistantMessageUnion = Union[AssistantMessage, HumanMessage, VisualizationMessage]


class Conversation(BaseModel):
    """Validated payload for an assistant conversation: a capped message history plus a session id."""

    # At least one message; capped at 20 to bound prompt size.
    messages: list[RootAssistantMessage] = Field(..., min_length=1, max_length=20)
    session_id: str


class AssistantState(TypedDict):
    """Shared LangGraph state passed between assistant nodes."""

    # Annotated with operator.add so the graph appends new messages instead of replacing the list.
    messages: Annotated[Sequence[AssistantMessageUnion], operator.add]
    intermediate_steps: Optional[list[tuple[AgentAction, Optional[str]]]]
    plan: Optional[str]
    tool_argument: Optional[str]


class AssistantNodeName(StrEnum):
    """Node identifiers for the assistant graph; START/END reuse LangGraph's sentinels."""

    START = START
    END = END
    CREATE_TRENDS_PLAN = "create_trends_plan"
    CREATE_TRENDS_PLAN_TOOLS = "create_trends_plan_tools"
    GENERATE_TRENDS = "generate_trends_schema"
    GENERATE_TRENDS_TOOLS = "generate_trends_tools"


class AssistantNode(ABC):
    """Base class for assistant graph nodes: holds the team and defines the run contract."""

    name: AssistantNodeName
    _team: Team

    def __init__(self, team: Team):
        self._team = team

    @abstractmethod
    def run(self, state: AssistantState, config: RunnableConfig):
        # Fix: the first parameter of this abstract *instance* method was named `cls`
        # despite there being no @classmethod decorator; renamed to the conventional `self`.
        raise NotImplementedError


def remove_line_breaks(line: str) -> str:
    """Replace every newline in `line` with a single space."""
    return line.replace("\n", " ")
b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr index 2a88759e116ee..c21b2882da6ed 100644 --- a/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr +++ b/ee/session_recordings/queries/test/__snapshots__/test_session_recording_list_from_filters.ambr @@ -16,12 +16,13 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_4)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 
%(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_7)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), now64(6, %(hogql_val_9)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_10)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) GROUP BY events.`$session_id` HAVING 1))) GROUP BY s.session_id @@ -47,9 +48,10 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), 
sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_4)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events LEFT OUTER JOIN @@ -59,12 +61,12 @@ GROUP BY person_distinct_id_overrides.distinct_id HAVING ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_7)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id FROM person WHERE equals(person.team_id, 2) GROUP BY person.id 
- HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_8)s), person.version), plus(now64(6, %(hogql_val_9)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_10)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), now64(6, %(hogql_val_12)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_13)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_16)s), 0)) GROUP BY 
events.`$session_id` HAVING 1))) GROUP BY s.session_id @@ -90,9 +92,10 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_4)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events LEFT OUTER JOIN @@ -102,12 +105,12 @@ GROUP BY person_distinct_id_overrides.distinct_id HAVING 
ifNull(equals(argMax(person_distinct_id_overrides.is_deleted, person_distinct_id_overrides.version), 0), 0) SETTINGS optimize_aggregation_in_order=1) AS events__override ON equals(events.distinct_id, events__override.distinct_id) LEFT JOIN - (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_7)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id + (SELECT argMax(replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(person.properties, %(hogql_val_8)s), ''), 'null'), '^"|"$', ''), person.version) AS properties___rgInternal, person.id AS id FROM person WHERE equals(person.team_id, 2) GROUP BY person.id - HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_8)s), person.version), plus(now64(6, %(hogql_val_9)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_10)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), now64(6, %(hogql_val_12)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_13)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_15)s), 0)) + HAVING and(ifNull(equals(argMax(person.is_deleted, person.version), 0), 0), ifNull(less(argMax(toTimeZone(person.created_at, %(hogql_val_9)s), person.version), plus(now64(6, %(hogql_val_10)s), toIntervalDay(1))), 0)) SETTINGS optimize_aggregation_in_order=1) AS events__person ON 
equals(if(not(empty(events__override.distinct_id)), events__override.person_id, events.person_id), events__person.id) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), now64(6, %(hogql_val_13)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_14)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_15)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(events__person.properties___rgInternal, %(hogql_val_16)s), 0)) GROUP BY events.`$session_id` HAVING 1))) GROUP BY s.session_id @@ -133,12 +136,13 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_4)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), 
toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_7)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), now64(6, %(hogql_val_9)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_10)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) GROUP BY events.`$session_id` HAVING 1))) GROUP BY s.session_id @@ -164,12 +168,13 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS 
console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, %(hogql_val_3)s)), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff(%(hogql_val_4)s, start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s - WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_4)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, + WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_5)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_6)s), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, %(hogql_val_7)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id FROM events - WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_7)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), now64(6, %(hogql_val_9)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_10)s), 
toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_12)s), 0)) + WHERE and(equals(events.team_id, 2), notEmpty(events.`$session_id`), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_8)s), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_9)s), now64(6, %(hogql_val_10)s)), greaterOrEquals(toTimeZone(events.timestamp, %(hogql_val_11)s), toDateTime64('2020-12-24 23:58:00.000000', 6, 'UTC')), lessOrEquals(toTimeZone(events.timestamp, %(hogql_val_12)s), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), ifNull(equals(nullIf(nullIf(mat_pp_rgInternal, ''), 'null'), %(hogql_val_13)s), 0)) GROUP BY events.`$session_id` HAVING 1))) GROUP BY s.session_id @@ -195,7 +200,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 
00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -228,7 +234,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -266,7 +273,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), 
sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -299,7 +307,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ 
-337,7 +346,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -370,7 +380,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -408,7 +419,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -441,7 +453,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -479,7 +492,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -512,7 +526,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -565,7 +580,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -598,7 +614,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), 
toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -651,7 +668,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -684,7 +702,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), 
sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -737,7 +756,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) 
GROUP BY s.session_id @@ -770,7 +790,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -823,7 +844,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), 
sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -856,7 +878,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -894,7 +917,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS 
console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -927,7 +951,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -965,7 +990,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -998,7 +1024,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -1036,7 +1063,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0)) GROUP BY s.session_id @@ -1069,7 +1097,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-25 00:00:00.000000', 6, 'UTC')), 0), ifNull(lessOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2021-01-01 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT events.`$session_id` AS session_id @@ -1107,7 +1136,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + 
round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1143,7 +1173,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1179,7 +1210,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1215,7 +1247,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1251,7 +1284,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - 
ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1293,7 +1327,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1335,7 +1370,8 @@ sum(s.console_log_count) AS 
console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1377,7 +1413,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), 
in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1419,7 +1456,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1461,7 +1499,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), 
ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1503,7 +1542,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` @@ -1545,7 +1585,8 @@ sum(s.console_log_count) AS console_log_count, sum(s.console_warn_count) AS console_warn_count, sum(s.console_error_count) AS console_error_count, - ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing + ifNull(greaterOrEquals(max(toTimeZone(s._timestamp, 'UTC')), toDateTime64('2021-01-01 13:41:23.000000', 6, 'UTC')), 0) AS ongoing, + round(multiply(divide(plus(plus(plus(divide(sum(s.active_milliseconds), 1000), sum(s.click_count)), sum(s.keypress_count)), sum(s.console_error_count)), plus(plus(plus(plus(sum(s.mouse_activity_count), dateDiff('SECOND', start_time, end_time)), sum(s.console_error_count)), sum(s.console_log_count)), 
sum(s.console_warn_count))), 100), 2) AS activity_score FROM session_replay_events AS s WHERE and(equals(s.team_id, 2), ifNull(greaterOrEquals(toTimeZone(s.min_first_timestamp, 'UTC'), toDateTime64('2020-12-11 13:46:23.000000', 6, 'UTC')), 0), in(s.session_id, (SELECT DISTINCT events.`$session_id` AS `$session_id` diff --git a/ee/settings.py b/ee/settings.py index 9844074a95631..64e3bfc5b8b3c 100644 --- a/ee/settings.py +++ b/ee/settings.py @@ -4,11 +4,10 @@ import os -from posthog.settings import AUTHENTICATION_BACKENDS, DEMO, SITE_URL, DEBUG +from posthog.settings import AUTHENTICATION_BACKENDS, DEBUG, DEMO, SITE_URL from posthog.settings.utils import get_from_env from posthog.utils import str_to_bool - # SSO AUTHENTICATION_BACKENDS = [ *AUTHENTICATION_BACKENDS, @@ -69,3 +68,8 @@ ) HOOK_HOG_FUNCTION_TEAMS = get_from_env("HOOK_HOG_FUNCTION_TEAMS", "", type_cast=str) + +# Assistant +LANGFUSE_PUBLIC_KEY = get_from_env("LANGFUSE_PUBLIC_KEY", "", type_cast=str) +LANGFUSE_SECRET_KEY = get_from_env("LANGFUSE_SECRET_KEY", "", type_cast=str) +LANGFUSE_HOST = get_from_env("LANGFUSE_HOST", "https://us.cloud.langfuse.com", type_cast=str) diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png index 02301a7432364..9fa6256d0c217 100644 Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--dark.png and b/frontend/__snapshots__/components-activitylog--insight-activity--dark.png differ diff --git a/frontend/__snapshots__/components-activitylog--insight-activity--light.png b/frontend/__snapshots__/components-activitylog--insight-activity--light.png index 38e6385740f05..bc40839af4c33 100644 Binary files a/frontend/__snapshots__/components-activitylog--insight-activity--light.png and b/frontend/__snapshots__/components-activitylog--insight-activity--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png 
b/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png index 4b3bbfb7ef31a..b06167e769fdd 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png and b/frontend/__snapshots__/components-cards-insight-details--funnel--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--funnel--light.png b/frontend/__snapshots__/components-cards-insight-details--funnel--light.png index 7b27a2a43b0bc..5d111346c0470 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--funnel--light.png and b/frontend/__snapshots__/components-cards-insight-details--funnel--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png b/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png index dd7cf8f8f0bab..2e76b7bc519d0 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png and b/frontend/__snapshots__/components-cards-insight-details--lifecycle--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png b/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png index 99bc9098d4009..f3469de9d12a5 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png and b/frontend/__snapshots__/components-cards-insight-details--lifecycle--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png b/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png index 830631927cce0..2e76b7bc519d0 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png and b/frontend/__snapshots__/components-cards-insight-details--stickiness--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png 
b/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png index 890ece473187e..f3469de9d12a5 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png and b/frontend/__snapshots__/components-cards-insight-details--stickiness--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends--dark.png index 03aec116ddc62..0c52887653c4d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends--light.png b/frontend/__snapshots__/components-cards-insight-details--trends--light.png index 466db9c1347eb..d6e587e87dad6 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-horizontal-bar--light.png differ diff --git 
a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png index 331ace08cbf58..d5687670ca845 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png index ab1312b31681b..d40e270309c25 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-multi--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-pie--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-pie--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png index 97bec961e91cb..a0f124d36649d 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-table--dark.png 
differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png index fad0cd75a960d..b2529fdabb056 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-table--light.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png index 6b08cf6fd3d08..2d14d0f3f17d7 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png and b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--dark.png differ diff --git a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png index 975a66aad5e55..c2afe2d05cbb1 100644 Binary files a/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png and b/frontend/__snapshots__/components-cards-insight-details--trends-world-map--light.png differ diff --git a/frontend/__snapshots__/components-playlist--default--dark.png b/frontend/__snapshots__/components-playlist--default--dark.png index 78a17bfdf2b8b..468e971365877 100644 Binary files a/frontend/__snapshots__/components-playlist--default--dark.png and b/frontend/__snapshots__/components-playlist--default--dark.png differ diff --git a/frontend/__snapshots__/components-playlist--default--light.png b/frontend/__snapshots__/components-playlist--default--light.png index 40f9f663960a2..38ca665d24d64 100644 Binary files a/frontend/__snapshots__/components-playlist--default--light.png and b/frontend/__snapshots__/components-playlist--default--light.png differ diff --git 
a/frontend/__snapshots__/components-playlist--multiple-sections--dark.png b/frontend/__snapshots__/components-playlist--multiple-sections--dark.png index bb70cad772b92..98a2b91fbfc6d 100644 Binary files a/frontend/__snapshots__/components-playlist--multiple-sections--dark.png and b/frontend/__snapshots__/components-playlist--multiple-sections--dark.png differ diff --git a/frontend/__snapshots__/components-playlist--multiple-sections--light.png b/frontend/__snapshots__/components-playlist--multiple-sections--light.png index 10bbdebec2c12..19d2b358256f8 100644 Binary files a/frontend/__snapshots__/components-playlist--multiple-sections--light.png and b/frontend/__snapshots__/components-playlist--multiple-sections--light.png differ diff --git a/frontend/__snapshots__/components-playlist--with-footer--dark.png b/frontend/__snapshots__/components-playlist--with-footer--dark.png index 78a17bfdf2b8b..468e971365877 100644 Binary files a/frontend/__snapshots__/components-playlist--with-footer--dark.png and b/frontend/__snapshots__/components-playlist--with-footer--dark.png differ diff --git a/frontend/__snapshots__/components-playlist--with-footer--light.png b/frontend/__snapshots__/components-playlist--with-footer--light.png index 40f9f663960a2..38ca665d24d64 100644 Binary files a/frontend/__snapshots__/components-playlist--with-footer--light.png and b/frontend/__snapshots__/components-playlist--with-footer--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png new file mode 100644 index 0000000000000..cb009d55395dd Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png 
b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png new file mode 100644 index 0000000000000..4c8ac367ea329 Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-hidden--light.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--dark.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--dark.png new file mode 100644 index 0000000000000..d8a5618560ff3 Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--dark.png differ diff --git a/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--light.png b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--light.png new file mode 100644 index 0000000000000..88a2b1085bae5 Binary files /dev/null and b/frontend/__snapshots__/components-properties-table--dollar-properties-on-person-searchable--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png b/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png index cf6449f3311d3..27f9ce425952e 100644 Binary files a/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png and b/frontend/__snapshots__/exporter-exporter--event-table-insight--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png b/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png index a103d51e26771..c770b9ea51123 100644 Binary files a/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png and b/frontend/__snapshots__/exporter-exporter--event-table-insight--light.png differ diff --git a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png 
b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png index 116e773e02c4e..72e68a0e0126a 100644 Binary files a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png and b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--dark.png differ diff --git a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png index 50bbf12ca4b16..5334f7abc4f78 100644 Binary files a/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png and b/frontend/__snapshots__/exporter-exporter--sql-insight-no-results--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--dark.png b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--dark.png index cbb31b2fef96d..5b9d724c95a96 100644 Binary files a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--dark.png and b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--light.png b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--light.png index 84d1b755c6ffd..c820ba1044cd8 100644 Binary files a/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--light.png and b/frontend/__snapshots__/lemon-ui-colors--all-three-thousand-color-options--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--dark.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--dark.png index 6dba78ce8a720..9b3f25ef524ce 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--light.png 
b/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--light.png index 92b06894e5bdd..f24e45c527717 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--light.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--bordered--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--dark.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--dark.png index 9d11b77a107be..aadf83a5cca37 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--light.png b/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--light.png index 467fff8f7e1ca..6020cb5ecb1bb 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--light.png and b/frontend/__snapshots__/lemon-ui-lemon-checkbox--overview--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--dark.png b/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--dark.png index 25f23db23c6e6..8fbdfe8b76fef 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--light.png b/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--light.png index 8e192d169ba4f..25d4b1bc1c185 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--light.png and b/frontend/__snapshots__/lemon-ui-lemon-field--fields-with-kea-form--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--dark.png b/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--dark.png index 24eac264804e4..99779f0baae6d 100644 Binary files 
a/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--light.png b/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--light.png index df046c562eb4f..24cf27496c843 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--light.png and b/frontend/__snapshots__/lemon-ui-lemon-field--pure-fields--light.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--dark.png b/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--dark.png index f271cd3ec278a..50a7c500c0453 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--dark.png and b/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--dark.png differ diff --git a/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--light.png b/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--light.png index e703172e04fa2..966d2e5bd7c3c 100644 Binary files a/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--light.png and b/frontend/__snapshots__/lemon-ui-lemon-progress-circle--overview--light.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png index 789d794e15160..e88dccd9f2b55 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png and b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--dark.png differ diff --git a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png index bb0ee5866829e..3c6d5e12507d6 100644 Binary files a/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png and 
b/frontend/__snapshots__/replay-player-failure--recent-recordings-404--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png index b2d4c263e841a..102a4203a02af 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png index 600363cbe918f..a0cb77b34bb3d 100644 Binary files a/frontend/__snapshots__/replay-player-success--recent-recordings--light.png and b/frontend/__snapshots__/replay-player-success--recent-recordings--light.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png index 085e1712a86b5..cb2256b059680 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--dark.png differ diff --git a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png index 0aca8202e0d92..38266f259b5b3 100644 Binary files a/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png and b/frontend/__snapshots__/replay-player-success--second-recording-in-list--light.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--disabled--dark.png b/frontend/__snapshots__/replay-watch-next-panel--disabled--dark.png new file mode 100644 index 0000000000000..314fc70e79b2c Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--disabled--dark.png differ diff --git 
a/frontend/__snapshots__/replay-watch-next-panel--disabled--light.png b/frontend/__snapshots__/replay-watch-next-panel--disabled--light.png new file mode 100644 index 0000000000000..4b77ee8e8e5e3 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--disabled--light.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--empty--dark.png b/frontend/__snapshots__/replay-watch-next-panel--empty--dark.png new file mode 100644 index 0000000000000..aa1dbc4f6b5b5 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--empty--dark.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--empty--light.png b/frontend/__snapshots__/replay-watch-next-panel--empty--light.png new file mode 100644 index 0000000000000..458a79009b242 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--empty--light.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--loading--dark.png b/frontend/__snapshots__/replay-watch-next-panel--loading--dark.png new file mode 100644 index 0000000000000..19ee177f1ef86 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--loading--dark.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--loading--light.png b/frontend/__snapshots__/replay-watch-next-panel--loading--light.png new file mode 100644 index 0000000000000..90c47df650fca Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--loading--light.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--scores--dark.png b/frontend/__snapshots__/replay-watch-next-panel--scores--dark.png new file mode 100644 index 0000000000000..d415312219653 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--scores--dark.png differ diff --git a/frontend/__snapshots__/replay-watch-next-panel--scores--light.png b/frontend/__snapshots__/replay-watch-next-panel--scores--light.png new file mode 100644 index 
0000000000000..037153a091079 Binary files /dev/null and b/frontend/__snapshots__/replay-watch-next-panel--scores--light.png differ diff --git a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png index 0d0cd6d752a46..6afb89461f228 100644 Binary files a/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png and b/frontend/__snapshots__/scenes-app-errortracking--group-page--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png index 5a042b0f3b12e..30d04ac4dcce1 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png index e10f4cc6a6a75..7f1fb1ff1cf1d 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png and b/frontend/__snapshots__/scenes-app-experiments--complete-funnel-experiment--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png index 32591170a12eb..63381344731b3 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png index 8e6538c1e24dd..4c45d979edb38 100644 Binary files 
a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment--light.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png index 784e386f8b67f..61d46bdc53b8f 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png index 9c8668050ec83..c0d84c532e486 100644 Binary files a/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png and b/frontend/__snapshots__/scenes-app-experiments--running-trend-experiment-many-variants--light.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png index 0725eb97f8df0..5a2eb06823ab6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png index ebceafa1ecce5..e86df67429be0 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--dark.png differ diff --git 
a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png index b6706129af789..d0f525ffb382c 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png index c28a32b736a5e..f8d64397cb918 100644 Binary files a/frontend/__snapshots__/scenes-app-max-ai--thread--light.png and b/frontend/__snapshots__/scenes-app-max-ai--thread--light.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png index 6495b842d4341..726824db9df72 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png index a74d32c607583..af4459e63b7a4 100644 Binary files a/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png and b/frontend/__snapshots__/scenes-app-notebooks--recordings-playlist--light.png differ diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png index 7df6f432cbc78..74ab9873aa914 100644 Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-node-new-hog-function--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png 
b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png index 7bdac43e78a6f..820998e556ba5 100644 Binary files a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png and b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png index 533a2c0833692..e7ec3a315684b 100644 Binary files a/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png and b/frontend/__snapshots__/scenes-app-sessionattributionexplorer--session-attribution-explorer--light.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png index 1cfa54e4bfb80..29fe5a51e80b4 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png index ca494887f274e..71f9dac33c3cd 100644 Binary files a/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png and b/frontend/__snapshots__/scenes-app-sidepanels--side-panel-docs--light.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png index 6d4e94f59d95b..00c3ef784c458 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png and 
b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--dark.png differ diff --git a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png index a4d1c03591a0c..7f1124a16d059 100644 Binary files a/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png and b/frontend/__snapshots__/scenes-app-surveys--surveys-global-settings--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png index f1495e24cf60f..83009eedca75c 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--light.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--light.png index e751008bc5d59..1ef92fbabedfa 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png index bb709a5c87410..b646b463af3f0 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--light.png b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--light.png index 
3bda7cb85e52f..475c2c0eff0ef 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-unsubscribe-modal-data-pipelines--light.png differ diff --git a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png index 444c0bb0cdc24..8633f30a452d7 100644 Binary files a/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png and b/frontend/__snapshots__/scenes-other-billing--billing-with-credit-cta--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--actions--dark.png b/frontend/__snapshots__/scenes-other-toolbar--actions--dark.png index 7d16576f50ed0..5f6b33fe06f12 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--actions--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--actions--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--actions--light.png b/frontend/__snapshots__/scenes-other-toolbar--actions--light.png index cd1ecee7c7a46..f202e5abcbe2a 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--actions--light.png and b/frontend/__snapshots__/scenes-other-toolbar--actions--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--actions-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--actions-dark--dark.png index f1505fe48b29d..7d66b287789ad 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--actions-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--actions-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--actions-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--actions-dark--light.png index 5ac59acf6dd8e..bf0e04794518a 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--actions-dark--light.png and 
b/frontend/__snapshots__/scenes-other-toolbar--actions-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--default--dark.png b/frontend/__snapshots__/scenes-other-toolbar--default--dark.png index c12c2d0aef813..89a609253c9e4 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--default--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--default--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--default--light.png b/frontend/__snapshots__/scenes-other-toolbar--default--light.png index caa5c3f31823f..e2f6298e7900c 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--default--light.png and b/frontend/__snapshots__/scenes-other-toolbar--default--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--default-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--default-dark--dark.png index 1c515a3374e43..0b1ba9d5b38bb 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--default-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--default-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--default-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--default-dark--light.png index a4114f85ddcbd..24149042213d2 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--default-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--default-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png index 523eb19bf4797..958c3a8180f92 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png 
b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png index 84fcc60d37928..113aa274ef69d 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png index b0a5521927ba7..bb3710e29705f 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png index b3593cbdd1a96..6a3cce109cc6a 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--events-debugger-empty-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--experiments--dark.png b/frontend/__snapshots__/scenes-other-toolbar--experiments--dark.png index 74ebee5b45dc5..5bf2d673e126b 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--experiments--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--experiments--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--experiments--light.png b/frontend/__snapshots__/scenes-other-toolbar--experiments--light.png index bd3fbbca1ccd7..75498c2dc471e 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--experiments--light.png and b/frontend/__snapshots__/scenes-other-toolbar--experiments--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--feature-flags--dark.png b/frontend/__snapshots__/scenes-other-toolbar--feature-flags--dark.png 
index 39cf734a11dfc..5428653edc4f6 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--feature-flags--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--feature-flags--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--feature-flags--light.png b/frontend/__snapshots__/scenes-other-toolbar--feature-flags--light.png index a648d688dbff8..eb6543dcb4c82 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--feature-flags--light.png and b/frontend/__snapshots__/scenes-other-toolbar--feature-flags--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--dark.png index 035dcb567bafb..e46c1cd5c8a59 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--light.png index e6bd850589912..0d27d842b1a7e 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--feature-flags-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png index f608cfb82aff9..532363bec2e0e 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png index cfb3647b74fb1..91387589ac54f 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png and 
b/frontend/__snapshots__/scenes-other-toolbar--heatmap--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png index 78ecb45e93619..844bb2ab7b167 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png index c50119609512c..40803fcdd4478 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--heatmap-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--inspect--dark.png b/frontend/__snapshots__/scenes-other-toolbar--inspect--dark.png index 5c394c8d47b19..1807cec97d98f 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--inspect--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--inspect--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--inspect--light.png b/frontend/__snapshots__/scenes-other-toolbar--inspect--light.png index 6f726cd8f063c..4f2e4aee2033d 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--inspect--light.png and b/frontend/__snapshots__/scenes-other-toolbar--inspect--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--dark.png index 9a0c3228aa9e5..ea3543905b229 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--light.png index 
33018ee4116aa..6818c6b7b16b1 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--inspect-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--minimized--dark.png b/frontend/__snapshots__/scenes-other-toolbar--minimized--dark.png index 4293ac7a888c2..5e191ec165bb0 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--minimized--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--minimized--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--minimized--light.png b/frontend/__snapshots__/scenes-other-toolbar--minimized--light.png index 6cef713183099..4e21e542cad79 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--minimized--light.png and b/frontend/__snapshots__/scenes-other-toolbar--minimized--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--dark.png b/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--dark.png index 8215d29cf6f50..24b9ca2c243e5 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--dark.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--light.png b/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--light.png index c52d993540a00..7da3a3b0bb887 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--light.png and b/frontend/__snapshots__/scenes-other-toolbar--minimized-dark--light.png differ diff --git a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png index 4ed719d8732c4..61d74438e5157 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png and b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--dark.png differ 
diff --git a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png index e45d7d9e63a16..631c297ebb36d 100644 Binary files a/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png and b/frontend/__snapshots__/scenes-other-toolbar--unauthenticated--light.png differ diff --git a/frontend/public/services/discord.png b/frontend/public/services/discord.png new file mode 100644 index 0000000000000..d21ba297b0d9c Binary files /dev/null and b/frontend/public/services/discord.png differ diff --git a/frontend/public/services/june.png b/frontend/public/services/june.png new file mode 100644 index 0000000000000..020339e36b3eb Binary files /dev/null and b/frontend/public/services/june.png differ diff --git a/frontend/public/services/klaviyo.png b/frontend/public/services/klaviyo.png new file mode 100644 index 0000000000000..07c9f2764489a Binary files /dev/null and b/frontend/public/services/klaviyo.png differ diff --git a/frontend/public/services/mailchimp.png b/frontend/public/services/mailchimp.png new file mode 100644 index 0000000000000..d57818566818b Binary files /dev/null and b/frontend/public/services/mailchimp.png differ diff --git a/frontend/public/services/microsoft-teams.png b/frontend/public/services/microsoft-teams.png new file mode 100644 index 0000000000000..8c06355885650 Binary files /dev/null and b/frontend/public/services/microsoft-teams.png differ diff --git a/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx b/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx index 8e37d85ccee66..0a287abee317f 100644 --- a/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx +++ b/frontend/src/exporter/ExportedInsight/ExportedInsight.tsx @@ -74,7 +74,7 @@ export function ExportedInsight({
- +

{name || derived_name} diff --git a/frontend/src/exporter/Exporter.stories.tsx b/frontend/src/exporter/Exporter.stories.tsx index 7dba408f0cdd7..b4232e3836694 100644 --- a/frontend/src/exporter/Exporter.stories.tsx +++ b/frontend/src/exporter/Exporter.stories.tsx @@ -49,17 +49,20 @@ TrendsLineInsightLegend.args = { insight: require('../mocks/fixtures/api/projects/team_id/insights/trendsLine.json'), legend: true, } +TrendsLineInsight.tags = ['test-skip'] // doesn't produce a helpful reference image, as canvas can't be captured export const TrendsLineInsightDetailed: Story = Template.bind({}) TrendsLineInsightDetailed.args = { insight: require('../mocks/fixtures/api/projects/team_id/insights/trendsLine.json'), detailed: true, } +TrendsLineInsightDetailed.tags = ['test-skip'] // doesn't produce a helpful reference image, as canvas can't be captured /** This should not happen in the exporter, but if it does, it shouldn't error out - we want a clear message. */ export const TrendsLineInsightNoResults: Story = Template.bind({}) // @ts-expect-error TrendsLineInsightNoResults.args = { insight: { ...TrendsLineInsight.args.insight, result: null } } +TrendsLineInsightNoResults.tags = ['test-skip'] // doesn't produce a helpful reference image, as canvas can't be captured export const TrendsLineMultiInsight: Story = Template.bind({}) TrendsLineMultiInsight.args = { diff --git a/frontend/src/layout/navigation-3000/navigationLogic.tsx b/frontend/src/layout/navigation-3000/navigationLogic.tsx index 74d9ac5212cad..09fd77527c43e 100644 --- a/frontend/src/layout/navigation-3000/navigationLogic.tsx +++ b/frontend/src/layout/navigation-3000/navigationLogic.tsx @@ -8,6 +8,7 @@ import { IconHome, IconLive, IconLogomark, + IconMegaphone, IconNotebook, IconPeople, IconPieChart, @@ -526,6 +527,15 @@ export const navigation3000Logic = kea([ to: urls.pipeline(), } : null, + featureFlags[FEATURE_FLAGS.MESSAGING] && hasOnboardedAnyProduct + ? 
{ + identifier: Scene.MessagingBroadcasts, + label: 'Messaging', + icon: , + to: urls.messagingBroadcasts(), + tag: 'alpha' as const, + } + : null, ].filter(isNotNil), ] }, diff --git a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelExperimentFeatureFlag.tsx b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelExperimentFeatureFlag.tsx index e15c10ce7b0e2..5002f2bd78929 100644 --- a/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelExperimentFeatureFlag.tsx +++ b/frontend/src/layout/navigation-3000/sidepanel/panels/SidePanelExperimentFeatureFlag.tsx @@ -1,3 +1,4 @@ +import { IconBalance } from '@posthog/icons' import { LemonBanner, LemonButton, LemonDivider, LemonInput, LemonTable, Link, Spinner } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { router } from 'kea-router' @@ -31,7 +32,8 @@ export const SidePanelExperimentFeatureFlag = (): JSX.Element => { const { experiment } = useValues(experimentLogic({ experimentId: experimentId ?? 'new' })) const _featureFlagLogic = featureFlagLogic({ id: experiment.feature_flag?.id ?? null } as FeatureFlagLogicProps) - const { featureFlag, areVariantRolloutsValid, variantRolloutSum, featureFlagLoading } = useValues(_featureFlagLogic) + const { featureFlag, areVariantRolloutsValid, variantRolloutSum, featureFlagLoading, nonEmptyVariants } = + useValues(_featureFlagLogic) const { setFeatureFlagFilters, saveSidebarExperimentFeatureFlag, distributeVariantsEqually } = useActions(_featureFlagLogic) @@ -99,8 +101,11 @@ export const SidePanelExperimentFeatureFlag = (): JSX.Element => { title: (
Rollout Percentage - - Redistribute + +
), @@ -121,6 +126,7 @@ export const SidePanelExperimentFeatureFlag = (): JSX.Element => { }} min={0} max={100} + suffix={%} /> ), }, @@ -138,6 +144,7 @@ export const SidePanelExperimentFeatureFlag = (): JSX.Element => { id={`${experiment.feature_flag?.id}`} filters={featureFlag?.filters ?? []} onChange={setFeatureFlagFilters} + nonEmptyFeatureFlagVariants={nonEmptyVariants} />
diff --git a/frontend/src/layout/navigation/EnvironmentSwitcher.tsx b/frontend/src/layout/navigation/EnvironmentSwitcher.tsx index 2432cb72c63f7..83c85092ff3ab 100644 --- a/frontend/src/layout/navigation/EnvironmentSwitcher.tsx +++ b/frontend/src/layout/navigation/EnvironmentSwitcher.tsx @@ -157,6 +157,17 @@ function determineProjectSwitchUrl(pathname: string, newTeamId: number): string // and after switching is on a different page than before. let route = removeProjectIdIfPresent(pathname) route = removeFlagIdIfPresent(route) + + // List of routes that should redirect to project home + // instead of keeping the current path. + const redirectToHomeRoutes = ['/products', '/onboarding'] + + const shouldRedirectToHome = redirectToHomeRoutes.some((redirectRoute) => route.includes(redirectRoute)) + + if (shouldRedirectToHome) { + return urls.project(newTeamId) // Go to project home + } + return urls.project(newTeamId, route) } diff --git a/frontend/src/lib/actionUtils.ts b/frontend/src/lib/actionUtils.ts index 87afffa1a0c4e..9096490300987 100644 --- a/frontend/src/lib/actionUtils.ts +++ b/frontend/src/lib/actionUtils.ts @@ -20,7 +20,6 @@ export const EXPERIMENT_TARGETS = [ 'del', 'details', 'dfn', - 'div', 'footer', 'header', 'ol', diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 5796e0e2e56df..a67918bfa0761 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -50,11 +50,11 @@ import { Experiment, ExportedAssetType, ExternalDataJob, + ExternalDataSource, ExternalDataSourceCreatePayload, ExternalDataSourceSchema, ExternalDataSourceSyncSchema, ExternalDataSourceType, - ExternalDataStripeSource, FeatureFlagAssociatedRoleType, FeatureFlagType, Group, @@ -63,6 +63,7 @@ import { HogFunctionStatus, HogFunctionTemplateType, HogFunctionType, + HogFunctionTypeType, InsightModel, IntegrationType, ListOrganizationMembersParams, @@ -480,12 +481,12 @@ class ApiRequest { } // Recordings - public recording(recordingId: SessionRecordingType['id'], 
teamId?: TeamType['id']): ApiRequest { - return this.environmentsDetail(teamId).addPathComponent('session_recordings').addPathComponent(recordingId) - } public recordings(teamId?: TeamType['id']): ApiRequest { return this.environmentsDetail(teamId).addPathComponent('session_recordings') } + public recording(recordingId: SessionRecordingType['id'], teamId?: TeamType['id']): ApiRequest { + return this.recordings(teamId).addPathComponent(recordingId) + } public recordingMatchingEvents(teamId?: TeamType['id']): ApiRequest { return this.environmentsDetail(teamId) .addPathComponent('session_recordings') @@ -748,7 +749,7 @@ class ApiRequest { } public subscription(id: SubscriptionType['id'], teamId?: TeamType['id']): ApiRequest { - return this.environmentsDetail(teamId).addPathComponent(id) + return this.subscriptions(teamId).addPathComponent(id) } // # Integrations @@ -854,7 +855,7 @@ class ApiRequest { return this.projectsDetail(teamId).addPathComponent('external_data_sources') } - public externalDataSource(sourceId: ExternalDataStripeSource['id'], teamId?: TeamType['id']): ApiRequest { + public externalDataSource(sourceId: ExternalDataSource['id'], teamId?: TeamType['id']): ApiRequest { return this.externalDataSources(teamId).addPathComponent(sourceId) } @@ -870,6 +871,9 @@ class ApiRequest { public insightVariables(teamId?: TeamType['id']): ApiRequest { return this.projectsDetail(teamId).addPathComponent('insight_variables') } + public insightVariable(variableId: string, teamId?: TeamType['id']): ApiRequest { + return this.insightVariables(teamId).addPathComponent(variableId) + } // ActivityLog public activity_log(teamId?: TeamType['id']): ApiRequest { @@ -1741,7 +1745,10 @@ const api = { }, }, hogFunctions: { - async list(params?: { filters?: any }): Promise> { + async list(params?: { + filters?: any + type?: HogFunctionTypeType + }): Promise> { return await new ApiRequest().hogFunctions().withQueryString(params).get() }, async get(id: HogFunctionType['id']): Promise 
{ @@ -1771,9 +1778,11 @@ const api = { ): Promise { return await new ApiRequest().hogFunction(id).withAction('metrics/totals').withQueryString(params).get() }, - - async listTemplates(): Promise> { - return await new ApiRequest().hogFunctionTemplates().get() + async listTemplates(type?: HogFunctionTypeType): Promise> { + return new ApiRequest() + .hogFunctionTemplates() + .withQueryString({ type: type ?? 'destination' }) + .get() }, async getTemplate(id: HogFunctionTemplateType['id']): Promise { return await new ApiRequest().hogFunctionTemplate(id).get() @@ -2080,6 +2089,13 @@ const api = { ): Promise { return await new ApiRequest().batchExportRun(id, runId, teamId).withAction('retry').create() }, + async cancelRun( + id: BatchExportConfiguration['id'], + runId: BatchExportRun['id'], + teamId?: TeamType['id'] + ): Promise { + return await new ApiRequest().batchExportRun(id, runId, teamId).withAction('cancel').create() + }, async logs( id: BatchExportConfiguration['id'], params: LogEntryRequestParams = {} @@ -2196,25 +2212,25 @@ const api = { }, }, externalDataSources: { - async list(options?: ApiMethodOptions | undefined): Promise> { + async list(options?: ApiMethodOptions | undefined): Promise> { return await new ApiRequest().externalDataSources().get(options) }, - async get(sourceId: ExternalDataStripeSource['id']): Promise { + async get(sourceId: ExternalDataSource['id']): Promise { return await new ApiRequest().externalDataSource(sourceId).get() }, async create(data: Partial): Promise<{ id: string }> { return await new ApiRequest().externalDataSources().create({ data }) }, - async delete(sourceId: ExternalDataStripeSource['id']): Promise { + async delete(sourceId: ExternalDataSource['id']): Promise { await new ApiRequest().externalDataSource(sourceId).delete() }, - async reload(sourceId: ExternalDataStripeSource['id']): Promise { + async reload(sourceId: ExternalDataSource['id']): Promise { await new 
ApiRequest().externalDataSource(sourceId).withAction('reload').create() }, async update( - sourceId: ExternalDataStripeSource['id'], - data: Partial - ): Promise { + sourceId: ExternalDataSource['id'], + data: Partial + ): Promise { return await new ApiRequest().externalDataSource(sourceId).update({ data }) }, async database_schema( @@ -2236,7 +2252,7 @@ const api = { .create({ data: { source_type, prefix } }) }, async jobs( - sourceId: ExternalDataStripeSource['id'], + sourceId: ExternalDataSource['id'], before: string | null, after: string | null ): Promise { @@ -2304,9 +2320,12 @@ const api = { async list(options?: ApiMethodOptions | undefined): Promise> { return await new ApiRequest().insightVariables().get(options) }, - async create(data: Partial): Promise { + async create(data: Partial): Promise { return await new ApiRequest().insightVariables().create({ data }) }, + async update(variableId: string, data: Partial): Promise { + return await new ApiRequest().insightVariable(variableId).update({ data }) + }, }, subscriptions: { @@ -2430,8 +2449,7 @@ const api = { query: T, options?: ApiMethodOptions, queryId?: string, - refresh?: boolean, - async?: boolean, + refresh?: RefreshType, filtersOverride?: DashboardFilter | null, variablesOverride?: Record | null ): Promise< @@ -2441,13 +2459,12 @@ const api = { : T['response'] : Record > { - const refreshParam: RefreshType | undefined = refresh && async ? 'force_async' : async ? 
'async' : refresh return await new ApiRequest().query().create({ ...options, data: { query, client_query_id: queryId, - refresh: refreshParam, + refresh, filters_override: filtersOverride, variables_override: variablesOverride, }, diff --git a/frontend/src/lib/components/Alerts/insightAlertsLogic.ts b/frontend/src/lib/components/Alerts/insightAlertsLogic.ts index 6bca4dc317fa1..f95c941eb3896 100644 --- a/frontend/src/lib/components/Alerts/insightAlertsLogic.ts +++ b/frontend/src/lib/components/Alerts/insightAlertsLogic.ts @@ -3,7 +3,7 @@ import { loaders } from 'kea-loaders' import api from 'lib/api' import { insightVizDataLogic } from 'scenes/insights/insightVizDataLogic' -import { GoalLine, InsightThresholdType } from '~/queries/schema' +import { AlertConditionType, GoalLine, InsightThresholdType } from '~/queries/schema' import { getBreakdown, isInsightVizNode, isTrendsQuery } from '~/queries/utils' import { InsightLogicProps } from '~/types' @@ -35,6 +35,7 @@ export const insightAlertsLogic = kea([ connect((props: InsightAlertsLogicProps) => ({ actions: [insightVizDataLogic(props.insightLogicProps), ['setQuery']], + values: [insightVizDataLogic(props.insightLogicProps), ['showAlertThresholdLines']], })), loaders(({ props }) => ({ @@ -62,11 +63,13 @@ export const insightAlertsLogic = kea([ selectors({ alertThresholdLines: [ - (s) => [s.alerts], - (alerts: AlertType[]): GoalLine[] => + (s) => [s.alerts, s.showAlertThresholdLines], + (alerts: AlertType[], showAlertThresholdLines: boolean): GoalLine[] => alerts.flatMap((alert) => { if ( + !showAlertThresholdLines || alert.threshold.configuration.type !== InsightThresholdType.ABSOLUTE || + alert.condition.type !== AlertConditionType.ABSOLUTE_VALUE || !alert.threshold.configuration.bounds ) { return [] @@ -75,14 +78,14 @@ export const insightAlertsLogic = kea([ const bounds = alert.threshold.configuration.bounds const thresholds = [] - if (bounds?.upper !== undefined) { + if (bounds?.upper != null) { 
thresholds.push({ label: `${alert.name} Upper Threshold`, value: bounds?.upper, }) } - if (bounds?.lower !== undefined) { + if (bounds?.lower != null) { thresholds.push({ label: `${alert.name} Lower Threshold`, value: bounds?.lower, diff --git a/frontend/src/lib/components/Alerts/views/Alerts.tsx b/frontend/src/lib/components/Alerts/views/Alerts.tsx index 004a848377edb..4bb1129fd1146 100644 --- a/frontend/src/lib/components/Alerts/views/Alerts.tsx +++ b/frontend/src/lib/components/Alerts/views/Alerts.tsx @@ -3,6 +3,7 @@ import { IconCheck } from '@posthog/icons' import { Tooltip } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { router } from 'kea-router' +import { FeedbackNotice } from 'lib/components/FeedbackNotice' import { DetectiveHog } from 'lib/components/hedgehogs' import { ProductIntroduction } from 'lib/components/ProductIntroduction/ProductIntroduction' import { LemonTable, LemonTableColumn, LemonTableColumns } from 'lib/lemon-ui/LemonTable' @@ -104,6 +105,8 @@ export function Alerts({ alertId }: AlertsProps): JSX.Element { // TODO: add info here to sign up for alerts early access return ( <> + + {alertsSortedByState.length === 0 && !alertsLoading && ( } - to={launchUrl(keyedURL.url)} + to={ + // toolbar urls are sent through the backend to be validated + // and have toolbar auth information added + type === AuthorizedUrlListType.TOOLBAR_URLS + ? 
launchUrl(keyedURL.url) + : // other urls are simply opened directly + keyedURL.url + } targetBlank tooltip={ type === AuthorizedUrlListType.TOOLBAR_URLS diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss index 9a8afaa08b7af..c61a7a179be63 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss +++ b/frontend/src/lib/components/Cards/InsightCard/InsightCard.scss @@ -108,10 +108,6 @@ } } -.InsightDetails__breakdown { - margin-bottom: 0.5rem; -} - .InsightDetails__footer { display: flex; flex-wrap: wrap; @@ -144,6 +140,7 @@ font-weight: 600; line-height: 1rem; color: var(--text-3000); + vertical-align: middle; background: var(--primary-highlight); border-radius: var(--radius); @@ -177,10 +174,6 @@ .SeriesDisplay__condition { display: flex; - - .SeriesDisplay__raw-name { - vertical-align: middle; - } } .SeriesDisplay__arrow { diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx index 9a0038556dea6..347fd3227ce40 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightDetails.tsx @@ -1,5 +1,6 @@ import { useValues } from 'kea' import { + convertPropertiesToPropertyGroup, formatPropertyLabel, isAnyPropertyfilter, isCohortPropertyFilter, @@ -41,13 +42,7 @@ import { isTrendsQuery, isValidBreakdown, } from '~/queries/utils' -import { - AnyPropertyFilter, - FilterLogicalOperator, - FilterType, - PropertyGroupFilter, - QueryBasedInsightModel, -} from '~/types' +import { AnyPropertyFilter, FilterLogicalOperator, PropertyGroupFilter, QueryBasedInsightModel } from '~/types' import { PropertyKeyInfo } from '../../PropertyKeyInfo' import { TZLabel } from '../../TZLabel' @@ -251,11 +246,11 @@ function PathsSummary({ query }: { query: PathsQuery }): JSX.Element { ) } -export function SeriesSummary({ 
query }: { query: InsightQueryNode }): JSX.Element { +export function SeriesSummary({ query, heading }: { query: InsightQueryNode; heading?: JSX.Element }): JSX.Element { return ( - <> -
Query summary
-
+
+
{heading || 'Query summary'}
+
{isTrendsQuery(query) && query.trendsFilter?.formula && ( <> } fullWidth> @@ -283,8 +278,8 @@ export function SeriesSummary({ query }: { query: InsightQueryNode }): JSX.Eleme Unavailable for this insight type. )}
-
- +
+ ) } @@ -293,44 +288,13 @@ export function PropertiesSummary({ }: { properties: PropertyGroupFilter | AnyPropertyFilter[] | undefined }): JSX.Element { - const groupFilter: PropertyGroupFilter | null = Array.isArray(properties) - ? { - type: FilterLogicalOperator.And, - values: [ - { - type: FilterLogicalOperator.And, - values: properties, - }, - ], - } - : properties || null - return ( - <> +
Filters
-
- -
- - ) -} - -export function LEGACY_FilterBasedBreakdownSummary({ filters }: { filters: Partial }): JSX.Element | null { - if (filters.breakdown_type == null || filters.breakdown == null) { - return null - } - - const breakdownArray = Array.isArray(filters.breakdown) ? filters.breakdown : [filters.breakdown] - - return ( - <> -
Breakdown by
-
- {breakdownArray.map((breakdown) => ( - - ))} -
- +
+ +
+
) } @@ -342,9 +306,9 @@ export function BreakdownSummary({ query }: { query: InsightQueryNode }): JSX.El const { breakdown_type, breakdown, breakdowns } = query.breakdownFilter return ( - <> +
Breakdown by
-
+
{Array.isArray(breakdowns) ? breakdowns.map((b) => ( @@ -355,8 +319,8 @@ export function BreakdownSummary({ query }: { query: InsightQueryNode }): JSX.El : [breakdown].map((b) => ( )))} -
- +

+ ) } diff --git a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx index 9ab7a431ff18c..5f6a526a19dd4 100644 --- a/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/InsightMeta.tsx @@ -97,7 +97,7 @@ export function InsightMeta({ refreshDisabledReason={refreshDisabledReason} setAreDetailsShown={setAreDetailsShown} areDetailsShown={areDetailsShown} - topHeading={} + topHeading={} meta={ <> diff --git a/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx b/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx index c9455d61067cc..e32c0ff48673a 100644 --- a/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx +++ b/frontend/src/lib/components/Cards/InsightCard/TopHeading.tsx @@ -1,7 +1,7 @@ import { dateFilterToText } from 'lib/utils' import { InsightTypeMetadata, QUERY_TYPES_METADATA } from 'scenes/saved-insights/SavedInsights' -import { NodeKind } from '~/queries/schema' +import { Node, NodeKind } from '~/queries/schema' import { containsHogQLQuery, dateRangeFor, @@ -9,11 +9,8 @@ import { isInsightQueryNode, isInsightVizNode, } from '~/queries/utils' -import { QueryBasedInsightModel } from '~/types' - -export function TopHeading({ insight }: { insight: QueryBasedInsightModel }): JSX.Element { - const { query } = insight +export function TopHeading({ query }: { query: Node | null }): JSX.Element { let insightType: InsightTypeMetadata if (query?.kind) { diff --git a/frontend/src/lib/components/CompactList/CompactList.tsx b/frontend/src/lib/components/CompactList/CompactList.tsx index 3d66e898e185f..52f831c034a87 100644 --- a/frontend/src/lib/components/CompactList/CompactList.tsx +++ b/frontend/src/lib/components/CompactList/CompactList.tsx @@ -10,7 +10,7 @@ import { themeLogic } from '~/layout/navigation-3000/themeLogic' import { EmptyMessage, EmptyMessageProps } from '../EmptyMessage/EmptyMessage' 
interface CompactListProps { - title: string + title: string | JSX.Element viewAllURL?: string loading: boolean items: any[] @@ -34,7 +34,7 @@ export function CompactList({ style={theme?.boxStyle} >
-

+

{title}

{viewAllURL && View all} diff --git a/frontend/src/lib/components/Playlist/Playlist.tsx b/frontend/src/lib/components/Playlist/Playlist.tsx index d7cc5ba845b4e..3d720f1f146e2 100644 --- a/frontend/src/lib/components/Playlist/Playlist.tsx +++ b/frontend/src/lib/components/Playlist/Playlist.tsx @@ -44,6 +44,7 @@ export type PlaylistProps = { onChangeSections?: (activeKeys: string[]) => void 'data-attr'?: string activeItemId?: string + controls?: JSX.Element | null } const CounterBadge = ({ children }: { children: React.ReactNode }): JSX.Element => ( @@ -70,6 +71,7 @@ export function Playlist< onSelect, onChangeSections, 'data-attr': dataAttr, + controls, }: PlaylistProps): JSX.Element { const [controlledActiveItemId, setControlledActiveItemId] = useState( selectInitialItem && sections[0].items[0] ? sections[0].items[0].id : null @@ -115,6 +117,7 @@ export function Playlist< setActiveItemId={onChangeActiveItem} onChangeSections={onChangeSections} emptyState={listEmptyState} + controls={controls} /> )} ['title'] notebooksHref: PlaylistProps['notebooksHref'] @@ -166,6 +170,7 @@ function List< onScrollListEdge: PlaylistProps['onScrollListEdge'] loading: PlaylistProps['loading'] emptyState: PlaylistProps['listEmptyState'] + controls?: JSX.Element | null }): JSX.Element { const [activeHeaderActionKey, setActiveHeaderActionKey] = useState(null) const lastScrollPositionRef = useRef(0) @@ -203,42 +208,49 @@ function List< return (
-
- } onClick={onClickCollapse} /> - - {title ? ( - - {title} - - ) : null} - - Showing {itemsCount} results. -
- Scrolling to the bottom or the top of the list will load older or newer results - respectively. - - } - > - - {Math.min(999, itemsCount)}+ - -
-
- {headerActions.map(({ key, icon, tooltip, children }) => ( +
+
setActiveHeaderActionKey(activeHeaderActionKey === key ? null : key)} - > - {children} - - ))} + icon={} + onClick={onClickCollapse} + /> + + {title ? ( + + {title} + + ) : null} + + Showing {itemsCount} results. +
+ Scrolling to the bottom or the top of the list will load older or newer results + respectively. + + } + > + + {Math.min(999, itemsCount)}+ + +
+
+ {headerActions.map(({ key, icon, tooltip, children }) => ( + setActiveHeaderActionKey(activeHeaderActionKey === key ? null : key)} + > + {children} + + ))} +
+ {controls ?
{controls}
: null}
diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx index 440000f5200b5..9f2ab2f83704f 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.stories.tsx @@ -1,4 +1,7 @@ import { Meta, StoryFn } from '@storybook/react' +import { useActions } from 'kea' +import { userPreferencesLogic } from 'lib/logic/userPreferencesLogic' +import { useEffect } from 'react' import { PropertyDefinitionType } from '~/types' @@ -46,7 +49,7 @@ export const DollarPropertiesOnEvent: StoryFn = () => { return } -export const DollarPropertiesOnPerson: StoryFn = () => { +export const DollarPropertiesOnPersonSearchable: StoryFn = () => { const properties = { pineapple_enjoyment_score: 3, $browser: 'Chrome', @@ -56,5 +59,24 @@ export const DollarPropertiesOnPerson: StoryFn = () => { $initial_utm_campaign: 'summer_sale', $initial_geoip_country_code: 'US', } - return + return +} + +export const DollarPropertiesOnPersonHidden: StoryFn = () => { + const { setHidePostHogPropertiesInTable } = useActions(userPreferencesLogic) + + useEffect(() => setHidePostHogPropertiesInTable(true), []) + + const properties = { + pineapple_enjoyment_score: 3, + $browser: 'Chrome', + utm_campaign: 'summer_sale', + $geoip_country_code: 'US', + $initial_browser: 'Chrome', + $initial_utm_campaign: 'summer_sale', + $initial_geoip_country_code: 'US', + } + return ( + + ) } diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx index b245d5db8bae4..38a196bdf0e44 100644 --- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx +++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx @@ -9,12 +9,12 @@ import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonTable, LemonTableColumns, LemonTableProps } 
from 'lib/lemon-ui/LemonTable' import { userPreferencesLogic } from 'lib/logic/userPreferencesLogic' import { + CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS, CORE_FILTER_DEFINITIONS_BY_GROUP, getCoreFilterDefinition, - NON_DOLLAR_POSTHOG_PROPERTY_KEYS, PROPERTY_KEYS, } from 'lib/taxonomy' -import { isURL } from 'lib/utils' +import { isObject, isURL } from 'lib/utils' import { useMemo, useState } from 'react' import { NewProperty } from 'scenes/persons/NewProperty' import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' @@ -180,6 +180,7 @@ function ValueDisplay({
) } + interface PropertiesTableType extends BasePropertyType { properties?: Record | Array> sortProperties?: boolean @@ -200,7 +201,7 @@ export function PropertiesTable({ properties, rootKey, onEdit, - sortProperties = false, + sortProperties = true, searchable = false, filterable = false, embedded = true, @@ -218,22 +219,36 @@ export function PropertiesTable({ const { isCloudOrDev } = useValues(preflightLogic) const objectProperties = useMemo(() => { - if (!properties || Array.isArray(properties)) { + if (!properties || Array.isArray(properties) || !isObject(properties)) { return [] } - let entries = Object.entries(properties).sort((a, b) => { - // if this is a posthog property we want to sort by its label - const left = getCoreFilterDefinition(a[0], TaxonomicFilterGroupType.EventProperties)?.label || a[0] - const right = getCoreFilterDefinition(b[0], TaxonomicFilterGroupType.EventProperties)?.label || b[0] - if (left < right) { - return -1 - } - if (left > right) { - return 1 - } - return 0 - }) + let entries = Object.entries(properties) + if (sortProperties) { + entries = entries.sort((a, b) => { + // if this is a posthog property we want to sort by its label + const propertyTypeMap: Record = { + [PropertyDefinitionType.Event]: TaxonomicFilterGroupType.EventProperties, + [PropertyDefinitionType.Person]: TaxonomicFilterGroupType.PersonProperties, + [PropertyDefinitionType.Group]: TaxonomicFilterGroupType.GroupsPrefix, + [PropertyDefinitionType.Session]: TaxonomicFilterGroupType.SessionProperties, + [PropertyDefinitionType.LogEntry]: TaxonomicFilterGroupType.LogEntries, + } + + const propertyType = propertyTypeMap[type] || TaxonomicFilterGroupType.EventProperties + + const left = getCoreFilterDefinition(a[0], propertyType)?.label || a[0] + const right = getCoreFilterDefinition(b[0], propertyType)?.label || b[0] + + if (left < right) { + return -1 + } + if (left > right) { + return 1 + } + return 0 + }) + } if (searchTerm) { const normalizedSearchTerm = 
searchTerm.toLowerCase() @@ -249,30 +264,13 @@ export function PropertiesTable({ if (filterable && hidePostHogPropertiesInTable) { entries = entries.filter(([key]) => { - const isPostHogProperty = key.startsWith('$') && PROPERTY_KEYS.includes(key) - const isNonDollarPostHogProperty = isCloudOrDev && NON_DOLLAR_POSTHOG_PROPERTY_KEYS.includes(key) + const isPostHogProperty = key.startsWith('$') || PROPERTY_KEYS.includes(key) + const isNonDollarPostHogProperty = isCloudOrDev && CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS.includes(key) return !isPostHogProperty && !isNonDollarPostHogProperty }) } - if (sortProperties) { - entries.sort(([aKey], [bKey]) => { - if (highlightedKeys) { - const aHighlightValue = highlightedKeys.includes(aKey) ? 0 : 1 - const bHighlightValue = highlightedKeys.includes(bKey) ? 0 : 1 - if (aHighlightValue !== bHighlightValue) { - return aHighlightValue - bHighlightValue - } - } - - if (aKey.startsWith('$') && !bKey.startsWith('$')) { - return 1 - } else if (!aKey.startsWith('$') && bKey.startsWith('$')) { - return -1 - } - return aKey.localeCompare(bKey) - }) - } else if (highlightedKeys) { + if (highlightedKeys) { entries.sort(([aKey], [bKey]) => { const aHighlightValue = highlightedKeys.includes(aKey) ? 0 : 1 const bHighlightValue = highlightedKeys.includes(bKey) ? 
0 : 1 @@ -410,9 +408,10 @@ export function PropertiesTable({ {searchable && ( )} diff --git a/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts b/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts index 1adc197e2c03c..4af30359d6422 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionLogic.test.ts @@ -26,12 +26,11 @@ export const fixtureSubscriptionResponse = (id: number, args: Partial { let newLogic: ReturnType let existingLogic: ReturnType - let subscriptions: SubscriptionType[] = [] beforeEach(async () => { - subscriptions = [fixtureSubscriptionResponse(1), fixtureSubscriptionResponse(2)] useMocks({ get: { - '/api/projects/:team/subscriptions/1': fixtureSubscriptionResponse(1), + '/api/environments/:team/subscriptions': { count: 1, results: [fixtureSubscriptionResponse(1)] }, + '/api/environments/:team/subscriptions/1': fixtureSubscriptionResponse(1), '/api/projects/:team/integrations': { count: 0, results: [] }, }, }) @@ -42,12 +41,28 @@ describe('subscriptionLogic', () => { }) existingLogic = subscriptionLogic({ insightShortId: Insight1, - id: subscriptions[0].id, + id: 1, }) newLogic.mount() existingLogic.mount() }) + it('loads existing subscription', async () => { + router.actions.push('/insights/123/subscriptions/1') + await expectLogic(existingLogic).toFinishListeners().toDispatchActions(['loadSubscriptionSuccess']) + expect(existingLogic.values.subscription).toMatchObject({ + id: 1, + title: 'My example subscription', + target_type: 'email', + target_value: 'ben@posthog.com,geoff@other-company.com', + frequency: 'monthly', + interval: 2, + start_date: '2022-01-01T00:09:00', + byweekday: ['wednesday'], + bysetpos: 1, + }) + }) + it('updates values depending on frequency', async () => { router.actions.push('/insights/123/subscriptions/new') await expectLogic(newLogic).toFinishListeners() diff --git 
a/frontend/src/lib/components/Subscriptions/subscriptionLogic.ts b/frontend/src/lib/components/Subscriptions/subscriptionLogic.ts index a3b328435be0f..c3a2564efdeee 100644 --- a/frontend/src/lib/components/Subscriptions/subscriptionLogic.ts +++ b/frontend/src/lib/components/Subscriptions/subscriptionLogic.ts @@ -1,4 +1,4 @@ -import { connect, kea, key, listeners, path, props } from 'kea' +import { kea, key, listeners, path, props } from 'kea' import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import { beforeUnload, router, urlToAction } from 'kea-router' @@ -30,9 +30,6 @@ export const subscriptionLogic = kea([ path(['lib', 'components', 'Subscriptions', 'subscriptionLogic']), props({} as SubscriptionsLogicProps), key(({ id, insightShortId, dashboardId }) => `${insightShortId || dashboardId}-${id ?? 'new'}`), - connect(({ insightShortId, dashboardId }: SubscriptionsLogicProps) => ({ - actions: [subscriptionsLogic({ insightShortId, dashboardId }), ['loadSubscriptions']], - })), loaders(({ props }) => ({ subscription: { @@ -97,7 +94,9 @@ export const subscriptionLogic = kea([ router.actions.replace(urlForSubscription(updatedSub.id, props)) } - actions.loadSubscriptions() + // If a subscriptionsLogic for this insight/dashboard is mounted already, let's make sure + // this change is propagated to `subscriptions` there + subscriptionsLogic.findMounted(props)?.actions.loadSubscriptions() actions.loadSubscriptionSuccess(updatedSub) lemonToast.success(`Subscription saved.`) diff --git a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx index c65ffd600898f..3d5973f55d75e 100644 --- a/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx +++ b/frontend/src/lib/components/TaxonomicFilter/TaxonomicFilter.tsx @@ -36,6 +36,7 @@ export function TaxonomicFilter({ selectFirstItem = true, propertyAllowList, hideBehavioralCohorts, + showNumericalPropsOnly, }: 
TaxonomicFilterProps): JSX.Element { // Generate a unique key for each unique TaxonomicFilter that's rendered const taxonomicFilterLogicKey = useMemo( @@ -62,6 +63,7 @@ export function TaxonomicFilter({ metadataSource, propertyAllowList, hideBehavioralCohorts, + showNumericalPropsOnly, } const logic = taxonomicFilterLogic(taxonomicFilterLogicProps) diff --git a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts index 25e2866afc2c3..bf9b55115cb69 100644 --- a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts +++ b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.test.ts @@ -66,6 +66,7 @@ describe('infiniteListLogic', () => { taxonomicFilterLogicKey: 'testList', listGroupType: TaxonomicFilterGroupType.Events, taxonomicGroupTypes: [TaxonomicFilterGroupType.Events], + showNumericalPropsOnly: false, } const logicWithProps = infiniteListLogic({ ...defaultProps, ...props }) logicWithProps.mount() @@ -98,6 +99,7 @@ describe('infiniteListLogic', () => { taxonomicFilterLogicKey: 'testList', listGroupType: TaxonomicFilterGroupType.Events, taxonomicGroupTypes: [TaxonomicFilterGroupType.Events], + showNumericalPropsOnly: false, }) logic.mount() }) @@ -180,11 +182,11 @@ describe('infiniteListLogic', () => { index: 0, remoteItems: partial({ count: 156 }), localItems: partial({ count: 1 }), - items: partial({ count: 157 }), + items: partial({ count: 101 }), }) - expectLogic(logic, () => logic.actions.moveUp()).toMatchValues({ index: 156 }) - expectLogic(logic, () => logic.actions.moveUp()).toMatchValues({ index: 155 }) - expectLogic(logic, () => logic.actions.moveDown()).toMatchValues({ index: 156 }) + expectLogic(logic, () => logic.actions.moveUp()).toMatchValues({ index: 100 }) + expectLogic(logic, () => logic.actions.moveUp()).toMatchValues({ index: 99 }) + expectLogic(logic, () => logic.actions.moveDown()).toMatchValues({ index: 100 }) expectLogic(logic, 
() => logic.actions.moveDown()).toMatchValues({ index: 0 }) expectLogic(logic, () => logic.actions.moveDown()).toMatchValues({ index: 1 }) expectLogic(logic, () => logic.actions.moveUp()).toMatchValues({ index: 0 }) @@ -196,7 +198,7 @@ describe('infiniteListLogic', () => { index: 0, remoteItems: partial({ count: 156 }), localItems: partial({ count: 1 }), - items: partial({ count: 157 }), + items: partial({ count: 101 }), }) await expectLogic(logic, () => @@ -237,6 +239,7 @@ describe('infiniteListLogic', () => { taxonomicFilterLogicKey: 'testList', listGroupType: TaxonomicFilterGroupType.Wildcards, taxonomicGroupTypes: [TaxonomicFilterGroupType.Events, TaxonomicFilterGroupType.Actions], + showNumericalPropsOnly: false, optionsFromProp: { wildcard: [{ name: 'first' }, { name: 'second' }], }, @@ -260,6 +263,7 @@ describe('infiniteListLogic', () => { listGroupType: TaxonomicFilterGroupType.EventProperties, eventNames: ['$pageview'], taxonomicGroupTypes: [TaxonomicFilterGroupType.EventProperties], + showNumericalPropsOnly: false, }) logic.mount() }) @@ -368,9 +372,9 @@ describe('infiniteListLogic', () => { isExpandable: false, isExpanded: true, isExpandableButtonSelected: false, - totalResultCount: 2, + totalResultCount: 3, totalExtraCount: 0, - totalListCount: 2, + totalListCount: 3, expandedCount: 0, remoteItems: partial({ count: 2, @@ -389,6 +393,7 @@ describe('infiniteListLogic', () => { taxonomicFilterLogicKey: 'test-element-list', listGroupType: TaxonomicFilterGroupType.Elements, taxonomicGroupTypes: [TaxonomicFilterGroupType.Elements], + showNumericalPropsOnly: false, }) logicWithProps.mount() diff --git a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts index eb8ca78c45ec4..05c5a387cad0f 100644 --- a/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts +++ b/frontend/src/lib/components/TaxonomicFilter/infiniteListLogic.ts @@ -69,7 +69,7 @@ async function 
fetchCachedListResponse(path: string, searchParams: Record([ - props({} as InfiniteListLogicProps), + props({ showNumericalPropsOnly: false } as InfiniteListLogicProps), key((props) => `${props.taxonomicFilterLogicKey}-${props.listGroupType}`), path((key) => ['lib', 'components', 'TaxonomicFilter', 'infiniteListLogic', key]), connect((props: InfiniteListLogicProps) => ({ @@ -310,15 +310,34 @@ export const infiniteListLogic = kea([ }, ], items: [ - (s) => [s.remoteItems, s.localItems], - (remoteItems, localItems) => ({ - results: [...localItems.results, ...remoteItems.results], - count: localItems.count + remoteItems.count, - searchQuery: localItems.searchQuery, - expandedCount: remoteItems.expandedCount, - queryChanged: remoteItems.queryChanged, - first: localItems.first && remoteItems.first, - }), + (s, p) => [s.remoteItems, s.localItems, p.showNumericalPropsOnly ?? (() => false)], + (remoteItems, localItems, showNumericalPropsOnly) => { + const results = [...localItems.results, ...remoteItems.results].filter((n) => { + if (!showNumericalPropsOnly) { + return true + } + + if ('is_numerical' in n) { + return !!n.is_numerical + } + + if ('property_type' in n) { + const property_type = n.property_type as string // Data warehouse props dont conformt to PropertyType for some reason + return property_type === 'Integer' || property_type === 'Float' + } + + return true + }) + + return { + results, + count: results.length, + searchQuery: localItems.searchQuery, + expandedCount: remoteItems.expandedCount, + queryChanged: remoteItems.queryChanged, + first: localItems.first && remoteItems.first, + } + }, ], totalResultCount: [(s) => [s.items], (items) => items.count || 0], totalExtraCount: [ diff --git a/frontend/src/lib/components/TaxonomicFilter/types.ts b/frontend/src/lib/components/TaxonomicFilter/types.ts index 40931c4ef93e3..8bbfdb247a607 100644 --- a/frontend/src/lib/components/TaxonomicFilter/types.ts +++ b/frontend/src/lib/components/TaxonomicFilter/types.ts @@ -38,6 
+38,7 @@ export interface TaxonomicFilterProps { propertyAllowList?: { [key in TaxonomicFilterGroupType]?: string[] } // only return properties in this list, currently only working for EventProperties and PersonProperties metadataSource?: AnyDataNode hideBehavioralCohorts?: boolean + showNumericalPropsOnly?: boolean } export interface TaxonomicFilterLogicProps extends TaxonomicFilterProps { diff --git a/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx b/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx index c7f9adf4ac81a..2f80f363b674d 100644 --- a/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx +++ b/frontend/src/lib/components/TaxonomicPopover/TaxonomicPopover.tsx @@ -26,6 +26,7 @@ export interface TaxonomicPopoverProps): JSX.Element { const [localValue, setLocalValue] = useState(value || ('' as ValueType)) @@ -94,6 +96,7 @@ export function TaxonomicPopover } matchWidth={false} diff --git a/frontend/src/lib/components/UniversalFilters/UniversalFilterButton.tsx b/frontend/src/lib/components/UniversalFilters/UniversalFilterButton.tsx index 049b4cb3da0ee..5b8c367f8291b 100644 --- a/frontend/src/lib/components/UniversalFilters/UniversalFilterButton.tsx +++ b/frontend/src/lib/components/UniversalFilters/UniversalFilterButton.tsx @@ -11,12 +11,11 @@ import React from 'react' import { cohortsModel } from '~/models/cohortsModel' import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' -import { ActionFilter, AnyPropertyFilter } from '~/types' +import { ActionFilter, AnyPropertyFilter, FeaturePropertyFilter, UniversalFilterValue } from '~/types' import { EntityFilterInfo } from '../EntityFilterInfo' import { formatPropertyLabel } from '../PropertyFilters/utils' -import { UniversalFilterValue } from './UniversalFilters' -import { isActionFilter, isEditableFilter, isEventFilter } from './utils' +import { isActionFilter, isEditableFilter, isEventFilter, isFeatureFlagFilter } from './utils' export 
interface UniversalFilterButtonProps { onClick?: () => void @@ -33,7 +32,7 @@ export const UniversalFilterButton = React.forwardRef ) : isAction ? ( + ) : isFeatureFlag ? ( + ) : ( )} @@ -116,3 +117,7 @@ const EventLabel = ({
) } + +const FeatureFlagLabel = ({ filter }: { filter: FeaturePropertyFilter }): JSX.Element => { + return
{filter.key}
+} diff --git a/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx b/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx index b59471de24cc0..117c6b678c59e 100644 --- a/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx +++ b/frontend/src/lib/components/UniversalFilters/UniversalFilters.tsx @@ -3,7 +3,7 @@ import { LemonButton, LemonButtonProps, LemonDropdown, Popover } from '@posthog/ import { BindLogic, useActions, useValues } from 'kea' import { useState } from 'react' -import { ActionFilter, AnyPropertyFilter, FilterLogicalOperator } from '~/types' +import { UniversalFiltersGroup, UniversalFilterValue } from '~/types' import { TaxonomicPropertyFilter } from '../PropertyFilters/components/TaxonomicPropertyFilter' import { PropertyFilters } from '../PropertyFilters/PropertyFilters' @@ -14,14 +14,6 @@ import { UniversalFilterButton } from './UniversalFilterButton' import { universalFiltersLogic } from './universalFiltersLogic' import { isEditableFilter, isEventFilter } from './utils' -export interface UniversalFiltersGroup { - type: FilterLogicalOperator - values: UniversalFiltersGroupValue[] -} - -export type UniversalFiltersGroupValue = UniversalFiltersGroup | UniversalFilterValue -export type UniversalFilterValue = AnyPropertyFilter | ActionFilter - type UniversalFiltersProps = { rootKey: string group: UniversalFiltersGroup | null @@ -160,7 +152,7 @@ const AddFilterButton = (props: Omit setDropdownOpen(!dropdownOpen)} {...props} > - Add filter + {props?.title || 'Add filter'} ) diff --git a/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.test.ts b/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.test.ts index 82b52b2e38053..5434ff37f76e1 100644 --- a/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.test.ts +++ b/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.test.ts @@ -1,10 +1,15 @@ import { expectLogic } from 'kea-test-utils' import { 
initKeaTests } from '~/test/init' -import { AnyPropertyFilter, FilterLogicalOperator, PropertyFilterType, PropertyOperator } from '~/types' +import { + AnyPropertyFilter, + FilterLogicalOperator, + PropertyFilterType, + PropertyOperator, + UniversalFiltersGroup, +} from '~/types' import { TaxonomicFilterGroup, TaxonomicFilterGroupType } from '../TaxonomicFilter/types' -import { UniversalFiltersGroup } from './UniversalFilters' import { universalFiltersLogic } from './universalFiltersLogic' const propertyFilter: AnyPropertyFilter = { diff --git a/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.ts b/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.ts index 8afb3cfcfc1cc..943688c46f3b7 100644 --- a/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.ts +++ b/frontend/src/lib/components/UniversalFilters/universalFiltersLogic.ts @@ -6,10 +6,17 @@ import { import { taxonomicFilterGroupTypeToEntityType } from 'scenes/insights/filters/ActionFilter/ActionFilterRow/ActionFilterRow' import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel' -import { ActionFilter, FilterLogicalOperator, PropertyFilterType } from '~/types' +import { + ActionFilter, + FeaturePropertyFilter, + FilterLogicalOperator, + PropertyFilterType, + PropertyOperator, + UniversalFiltersGroup, + UniversalFiltersGroupValue, +} from '~/types' import { TaxonomicFilterGroup, TaxonomicFilterGroupType, TaxonomicFilterValue } from '../TaxonomicFilter/types' -import { UniversalFiltersGroup, UniversalFiltersGroupValue } from './UniversalFilters' import type { universalFiltersLogicType } from './universalFiltersLogicType' export const DEFAULT_UNIVERSAL_GROUP_FILTER: UniversalFiltersGroup = { @@ -52,7 +59,7 @@ export const universalFiltersLogic = kea([ addGroupFilter: ( taxonomicGroup: TaxonomicFilterGroup, propertyKey: TaxonomicFilterValue, - item: { propertyFilterType?: PropertyFilterType; name?: string } + item: { propertyFilterType?: 
PropertyFilterType; name?: string; key?: string } ) => ({ taxonomicGroup, propertyKey, @@ -98,6 +105,7 @@ export const universalFiltersLogic = kea([ TaxonomicFilterGroupType.Cohorts, TaxonomicFilterGroupType.Elements, TaxonomicFilterGroupType.HogQLExpression, + TaxonomicFilterGroupType.FeatureFlags, ].includes(t) ), ], @@ -112,26 +120,39 @@ export const universalFiltersLogic = kea([ addGroupFilter: ({ taxonomicGroup, propertyKey, item }) => { const newValues = [...values.filterGroup.values] - const propertyType = item.propertyFilterType ?? taxonomicFilterTypeToPropertyFilterType(taxonomicGroup.type) - if (propertyKey && propertyType) { - const newPropertyFilter = createDefaultPropertyFilter( - {}, - propertyKey, - propertyType, - taxonomicGroup, - values.describeProperty - ) - newValues.push(newPropertyFilter) + if (taxonomicGroup.type === TaxonomicFilterGroupType.FeatureFlags) { + if (!item.key) { + return + } + const newFeatureFlagFilter: FeaturePropertyFilter = { + type: PropertyFilterType.Feature, + key: item.key, + operator: PropertyOperator.Exact, + } + newValues.push(newFeatureFlagFilter) } else { - const entityType = taxonomicFilterGroupTypeToEntityType(taxonomicGroup.type) - if (entityType) { - const newEntityFilter: ActionFilter = { - id: propertyKey, - name: item?.name ?? '', - type: entityType, - } + const propertyType = + item.propertyFilterType ?? taxonomicFilterTypeToPropertyFilterType(taxonomicGroup.type) + if (propertyKey && propertyType) { + const newPropertyFilter = createDefaultPropertyFilter( + {}, + propertyKey, + propertyType, + taxonomicGroup, + values.describeProperty + ) + newValues.push(newPropertyFilter) + } else { + const entityType = taxonomicFilterGroupTypeToEntityType(taxonomicGroup.type) + if (entityType) { + const newEntityFilter: ActionFilter = { + id: propertyKey, + name: item?.name ?? 
'', + type: entityType, + } - newValues.push(newEntityFilter) + newValues.push(newEntityFilter) + } } } actions.setGroupValues(newValues) diff --git a/frontend/src/lib/components/UniversalFilters/utils.ts b/frontend/src/lib/components/UniversalFilters/utils.ts index f8b63af80ce5e..56eaa52f5f6d9 100644 --- a/frontend/src/lib/components/UniversalFilters/utils.ts +++ b/frontend/src/lib/components/UniversalFilters/utils.ts @@ -1,7 +1,15 @@ -import { ActionFilter, FilterLogicalOperator, LogEntryPropertyFilter, RecordingPropertyFilter } from '~/types' +import { + ActionFilter, + FeaturePropertyFilter, + FilterLogicalOperator, + LogEntryPropertyFilter, + RecordingPropertyFilter, + UniversalFiltersGroup, + UniversalFiltersGroupValue, + UniversalFilterValue, +} from '~/types' import { isCohortPropertyFilter } from '../PropertyFilters/utils' -import { UniversalFiltersGroup, UniversalFiltersGroupValue, UniversalFilterValue } from './UniversalFilters' export function isUniversalGroupFilterLike(filter?: UniversalFiltersGroupValue): filter is UniversalFiltersGroup { return filter?.type === FilterLogicalOperator.And || filter?.type === FilterLogicalOperator.Or @@ -15,6 +23,9 @@ export function isEventFilter(filter: UniversalFilterValue): filter is ActionFil export function isActionFilter(filter: UniversalFilterValue): filter is ActionFilter { return filter.type === 'actions' } +export function isFeatureFlagFilter(filter: UniversalFilterValue): filter is FeaturePropertyFilter { + return filter.type === 'feature' +} export function isRecordingPropertyFilter(filter: UniversalFilterValue): filter is RecordingPropertyFilter { return filter.type === 'recording' } diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 36db4c50cdc65..cc6a67fc504a2 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -215,11 +215,13 @@ export const FEATURE_FLAGS = { INSIGHT_VARIABLES: 'insight_variables', // owner: @Gilbert09 #team-data-warehouse 
WEB_EXPERIMENTS: 'web-experiments', // owner: @team-feature-success BIGQUERY_DWH: 'bigquery-dwh', // owner: @Gilbert09 #team-data-warehouse - REPLAY_DEFAULT_SORT_ORDER_EXPERIMENT: 'replay-order-by-experiment', // owner: #team-replay ENVIRONMENTS: 'environments', // owner: @Twixes #team-product-analytics BILLING_PAYMENT_ENTRY_IN_APP: 'billing-payment-entry-in-app', // owner: @zach LEGACY_ACTION_WEBHOOKS: 'legacy-action-webhooks', // owner: @mariusandra #team-cdp SESSION_REPLAY_URL_TRIGGER: 'session-replay-url-trigger', // owner: @richard-better #team-replay + REPLAY_TEMPLATES: 'replay-templates', // owner: @raquelmsmith #team-replay + EXPERIMENTS_HOLDOUTS: 'experiments-holdouts', // owner: @jurajmajerik #team-experiments + MESSAGING: 'messaging', // owner @mariusandra #team-cdp } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/lemon-ui/LemonCard/LemonCard.tsx b/frontend/src/lib/lemon-ui/LemonCard/LemonCard.tsx index 0ac8c31c817ee..c7f86768b3bfe 100644 --- a/frontend/src/lib/lemon-ui/LemonCard/LemonCard.tsx +++ b/frontend/src/lib/lemon-ui/LemonCard/LemonCard.tsx @@ -1,5 +1,9 @@ import './LemonCard.scss' +import { IconX } from '@posthog/icons' + +import { LemonButton } from '../LemonButton' + export interface LemonCardProps { hoverEffect?: boolean className?: string @@ -7,6 +11,8 @@ export interface LemonCardProps { onClick?: () => void focused?: boolean 'data-attr'?: string + closeable?: boolean + onClose?: () => void } export function LemonCard({ @@ -15,16 +21,31 @@ export function LemonCard({ children, onClick, focused, + closeable, + onClose, ...props }: LemonCardProps): JSX.Element { return (
+ {closeable ? ( +
+ } + onClick={(e) => { + e.stopPropagation() + onClose?.() + }} + type="tertiary" + size="xsmall" + /> +
+ ) : null} {children}
) diff --git a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss index 6af35383fcc7e..b991fcd6121a3 100644 --- a/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss +++ b/frontend/src/lib/lemon-ui/LemonCheckbox/LemonCheckbox.scss @@ -1,4 +1,6 @@ .LemonCheckbox { + --lemon-checkbox-height: calc(2.125rem + 3px); // Medium size button height + button shadow height; + display: flex; align-items: center; width: fit-content; @@ -88,7 +90,7 @@ line-height: 1.4; label { - min-height: 2.5rem; + min-height: var(--lemon-checkbox-height); padding: 0 0.75rem; background: var(--bg-light); border: 1px solid var(--border); diff --git a/frontend/src/lib/lemon-ui/colors.stories.tsx b/frontend/src/lib/lemon-ui/colors.stories.tsx index cb3566640b7a7..f2e87c73528af 100644 --- a/frontend/src/lib/lemon-ui/colors.stories.tsx +++ b/frontend/src/lib/lemon-ui/colors.stories.tsx @@ -63,13 +63,123 @@ const preThousand = [ ] const threeThousand = [ + 'primary', + 'danger-highlight', + 'danger-lighter', + 'danger-light', + 'danger', + 'danger-dark', + 'warning-highlight', + 'warning', + 'warning-dark', + 'highlight', + 'success-highlight', + 'success-light', + 'success', + 'success-dark', + 'muted', + 'muted-alt', + 'mark', + 'white', + 'bg-light', + 'side', + 'mid', + 'border', + 'border-light', + 'border-bold', + 'transparent', + 'link', + // Colors of the PostHog logo + 'brand-blue', + 'brand-red', + 'brand-yellow', + 'brand-key', + + // PostHog 3000 + 'text-3000-light', + 'text-secondary-3000-light', + 'muted-3000-light', + 'trace-3000-light', + 'primary-3000-light', + 'primary-highlight-light', + 'primary-3000-hover-light', + 'primary-3000-active-light', + + 'secondary-3000-light', + 'secondary-3000-hover-light', + 'accent-3000-light', + 'bg-3000-light', + 'border-3000-light', + 'border-bold-3000-light', + 'glass-bg-3000-light', + 'glass-border-3000-light', + + 'link-3000-light', + 'primary-3000-frame-bg-light', + 
'primary-3000-button-bg-light', + 'primary-3000-button-border-light', + 'primary-3000-button-border-hover-light', + + 'secondary-3000-frame-bg-light', + 'secondary-3000-button-bg-light', + 'secondary-3000-button-border-light', + 'secondary-3000-button-border-hover-light', + + 'danger-3000-frame-bg-light', + 'danger-3000-button-border-light', + 'danger-3000-button-border-hover-light', + + 'shadow-elevation-3000-light', + 'shadow-elevation-3000-dark', + 'text-3000-dark', + 'text-secondary-3000-dark', + 'muted-3000-dark', + 'trace-3000-dark', + 'primary-3000-dark', + 'primary-highlight-dark', + 'primary-3000-hover-dark', + 'primary-3000-active-dark', + 'primary-alt-highlight-light', + + 'secondary-3000-dark', + 'secondary-3000-hover-dark', + 'accent-3000-dark', + 'bg-3000-dark', + 'border-3000-dark', + 'border-bold-3000-dark', + 'glass-bg-3000-dark', + 'glass-border-3000-dark', + 'link-3000-dark', + + 'primary-3000-frame-bg-dark', + 'primary-3000-button-bg-dark', + 'primary-3000-button-border-dark', + 'primary-3000-button-border-hover-dark', + 'primary-alt-highlight-dark', + + 'secondary-3000-frame-bg-dark', + 'secondary-3000-button-bg-dark', + 'secondary-3000-button-border-dark', + 'secondary-3000-button-border-hover-dark', + + 'danger-3000-frame-bg-dark', + 'danger-3000-button-border-dark', + 'danger-3000-button-border-hover-dark', + + // The derived colors + // `--default` is a pre-3000 alias for "default text color" (`--text-3000` now) + 'default', 'text-3000', + 'text-secondary-3000', 'muted-3000', 'primary-3000', 'secondary-3000', 'secondary-3000-hover', 'accent-3000', 'bg-3000', + 'primary-highlight', + 'primary-alt-highlight', + 'primary-alt', ] export function ColorPalette(): JSX.Element { @@ -147,7 +257,7 @@ export function AllThreeThousandColorOptions(): JSX.Element { render: function RenderColor(color) { return (
-
+
) }, @@ -159,7 +269,7 @@ export function AllThreeThousandColorOptions(): JSX.Element { render: function RenderColor(color) { return (
-
+
) }, diff --git a/frontend/src/lib/lemon-ui/icons/categories.ts b/frontend/src/lib/lemon-ui/icons/categories.ts index 1b7b97b39abe5..879673b1c94c1 100644 --- a/frontend/src/lib/lemon-ui/icons/categories.ts +++ b/frontend/src/lib/lemon-ui/icons/categories.ts @@ -198,7 +198,7 @@ export const TEAMS_AND_COMPANIES = { 'IconRewindPlay', 'IconVideoCamera', ], - 'Feature Success': ['IconFlask', 'IconTestTube', 'IconMultivariateTesting', 'IconSplitTesting'], + 'Feature Success': ['IconFlask', 'IconTestTube', 'IconMultivariateTesting', 'IconSplitTesting', 'IconBalance'], Pipeline: ['IconWebhooks', 'IconDecisionTree'], 'Product OS': ['IconNotebook', 'IconHogQL', 'IconDashboard', 'IconSupport'], Logos: ['IconLogomark', 'IconGithub'], diff --git a/frontend/src/lib/taxonomy.tsx b/frontend/src/lib/taxonomy.tsx index 76c60960eec8e..a578556789410 100644 --- a/frontend/src/lib/taxonomy.tsx +++ b/frontend/src/lib/taxonomy.tsx @@ -158,7 +158,7 @@ export const CORE_FILTER_DEFINITIONS_BY_GROUP = { description: 'Automatically captured exceptions from the client Sentry integration', }, $web_vitals: { - label: 'Web vitals', + label: 'Web Vitals', description: 'Automatically captured web vitals data', }, // Mobile SDKs events @@ -1417,7 +1417,7 @@ export const PROPERTY_KEYS = Object.keys(CORE_FILTER_DEFINITIONS_BY_GROUP.event_ * but often more numerous than actual properties set on events and useful to hide * to make those properties discoverable */ -export const NON_DOLLAR_POSTHOG_PROPERTY_KEYS = [ +export const CLOUD_INTERNAL_POSTHOG_PROPERTY_KEYS = [ 'billing_period_end', 'billing_period_start', 'current_amount_usd.data_warehouse', @@ -1443,6 +1443,13 @@ export const NON_DOLLAR_POSTHOG_PROPERTY_KEYS = [ 'custom_limits.product_analytics', 'custom_limits.session_replay', 'custom_limits.surveys', + 'custom_limits_usd.data_warehouse', + 'custom_limits_usd.feature_flags', + 'custom_limits_usd.integrations', + 'custom_limits_usd.platform_and_support', + 'custom_limits_usd.product_analytics', + 
'custom_limits_usd.session_replay', + 'custom_limits_usd.surveys', 'free_allocation.data_warehouse', 'free_allocation.feature_flags', 'free_allocation.integrations', diff --git a/frontend/src/queries/Query/Query.tsx b/frontend/src/queries/Query/Query.tsx index 7f6e4926f1ca7..ce5785ea88d9d 100644 --- a/frontend/src/queries/Query/Query.tsx +++ b/frontend/src/queries/Query/Query.tsx @@ -74,7 +74,7 @@ export function Query(props: QueryProps): JSX.Element | null }, [propsQuery]) const query = readOnly ? propsQuery : localQuery - const setQuery = readOnly ? undefined : propsSetQuery ?? localSetQuery + const setQuery = propsSetQuery ?? localSetQuery const queryContext = props.context || {} @@ -98,17 +98,18 @@ export function Query(props: QueryProps): JSX.Element | null component = ( void) | undefined} + setQuery={setQuery as unknown as (query: DataTableNode) => void} context={queryContext} cachedResults={props.cachedResults} uniqueKey={uniqueKey} + readOnly={readOnly} /> ) } else if (isDataVisualizationNode(query)) { component = ( void) | undefined} + setQuery={setQuery as unknown as (query: DataVisualizationNode) => void} cachedResults={props.cachedResults} uniqueKey={uniqueKey} context={queryContext} @@ -117,12 +118,12 @@ export function Query(props: QueryProps): JSX.Element | null /> ) } else if (isSavedInsightNode(query)) { - component = + component = } else if (isInsightVizNode(query)) { component = ( void) | undefined} + setQuery={setQuery as unknown as (query: InsightVizNode) => void} context={queryContext} readOnly={readOnly} uniqueKey={uniqueKey} @@ -135,13 +136,7 @@ export function Query(props: QueryProps): JSX.Element | null } else if (isWebOverviewQuery(query)) { component = } else if (isHogQuery(query)) { - component = ( - void)} - queryKey={String(uniqueKey)} - /> - ) + component = void} queryKey={String(uniqueKey)} /> } else { component = } diff --git a/frontend/src/queries/nodes/DataTable/DataTable.tsx b/frontend/src/queries/nodes/DataTable/DataTable.tsx 
index 335cd7d6730f7..12e4193fe35dc 100644 --- a/frontend/src/queries/nodes/DataTable/DataTable.tsx +++ b/frontend/src/queries/nodes/DataTable/DataTable.tsx @@ -68,7 +68,7 @@ import { DataTableOpenEditor } from './DataTableOpenEditor' interface DataTableProps { uniqueKey?: string | number query: DataTableNode - setQuery?: (query: DataTableNode) => void + setQuery: (query: DataTableNode) => void /** Custom table columns */ context?: QueryContext /* Cached Results are provided when shared or exported, @@ -76,6 +76,7 @@ interface DataTableProps { cachedResults?: AnyResponseType // Override the data logic node key if needed dataNodeLogicKey?: string + readOnly?: boolean } const eventGroupTypes = [ @@ -88,7 +89,14 @@ const personGroupTypes = [TaxonomicFilterGroupType.HogQLExpression, TaxonomicFil let uniqueNode = 0 -export function DataTable({ uniqueKey, query, setQuery, context, cachedResults }: DataTableProps): JSX.Element { +export function DataTable({ + uniqueKey, + query, + setQuery, + context, + cachedResults, + readOnly, +}: DataTableProps): JSX.Element { const [uniqueNodeKey] = useState(() => uniqueNode++) const [dataKey] = useState(() => `DataNode.${uniqueKey || uniqueNodeKey}`) const insightProps: InsightLogicProps = context?.insightProps || { @@ -148,7 +156,7 @@ export function DataTable({ uniqueKey, query, setQuery, context, cachedResults } showTimings, } = queryWithDefaults - const isReadOnly = setQuery === undefined + const isReadOnly = !!readOnly const eventActionsColumnShown = showActions && sourceFeatures.has(QueryFeature.eventActionsColumn) && columnsInResponse?.includes('*') diff --git a/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx b/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx index cf29b0888d347..1905862986584 100644 --- a/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx +++ b/frontend/src/queries/nodes/DataTable/renderColumnMeta.tsx @@ -4,9 +4,9 @@ import { SortingIndicator } from 'lib/lemon-ui/LemonTable/sorting' 
import { getQueryFeatures, QueryFeature } from '~/queries/nodes/DataTable/queryFeatures' import { extractExpressionComment } from '~/queries/nodes/DataTable/utils' -import { DataTableNode, EventsQuery } from '~/queries/schema' +import { DataTableNode, DataVisualizationNode, EventsQuery } from '~/queries/schema' import { QueryContext } from '~/queries/types' -import { isHogQLQuery, trimQuotes } from '~/queries/utils' +import { isDataTableNode, isHogQLQuery, trimQuotes } from '~/queries/utils' export interface ColumnMeta { title?: JSX.Element | string @@ -14,7 +14,11 @@ export interface ColumnMeta { align?: 'left' | 'right' | 'center' } -export function renderColumnMeta(key: string, query: DataTableNode, context?: QueryContext): ColumnMeta { +export function renderColumnMeta( + key: string, + query: T, + context?: QueryContext +): ColumnMeta { let width: string | number | undefined let title: JSX.Element | string | undefined const queryFeatures = getQueryFeatures(query.source) @@ -34,7 +38,8 @@ export function renderColumnMeta(key: string, query: DataTableNode, context?: Qu } if (title.startsWith("tuple('__hx_tag', '")) { const tagName = title.substring(19, title.indexOf("'", 19)) - title = tagName === '__hx_obj' ? 'Object' : '<' + tagName + ' />' + title = + tagName === '__hx_obj' ? 'Object' : tagName === 'RecordingButton' ? 'Recording' : '<' + tagName + ' />' } } else if (key === 'timestamp') { title = 'Time' @@ -87,7 +92,8 @@ export function renderColumnMeta(key: string, query: DataTableNode, context?: Qu title = Component ? : context?.columns?.[key]?.title } - if (queryFeatures.has(QueryFeature.selectAndOrderByColumns) && !query.allowSorting) { + if (queryFeatures.has(QueryFeature.selectAndOrderByColumns) && isDataTableNode(query) && !query.allowSorting) { + query const sortKey = queryFeatures.has(QueryFeature.selectAndOrderByColumns) ? 
(query.source as EventsQuery)?.orderBy?.[0] : null diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx index 967a9666b5940..d0a4af4d22ff8 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Table.tsx @@ -10,6 +10,7 @@ import { QueryContext } from '~/queries/types' import { LoadNext } from '../../DataNode/LoadNext' import { renderColumn } from '../../DataTable/renderColumn' +import { renderColumnMeta } from '../../DataTable/renderColumnMeta' import { convertTableValue, dataVisualizationLogic, TableDataCell } from '../dataVisualizationLogic' interface TableProps { @@ -33,62 +34,67 @@ export const Table = (props: TableProps): JSX.Element => { } = useValues(dataVisualizationLogic) const tableColumns: LemonTableColumn[], any>[] = tabularColumns.map( - ({ column, settings }, index) => ({ - title: settings?.display?.label || column.name, - render: (_, data, recordIndex: number) => { - return renderColumn(column.name, data[index].formattedValue, data, recordIndex, { - kind: NodeKind.DataTableNode, - source: props.query.source, - }) - }, - style: (_, data) => { - const cf = conditionalFormattingRules - .filter((n) => n.columnName === column.name) - .map((n) => { - const res = execHog(n.bytecode, { - globals: { - value: data[index].value, - input: convertTableValue(n.input, column.type.name), - }, - functions: {}, - maxAsyncSteps: 0, - }) + ({ column, settings }, index) => { + const { title, ...columnMeta } = renderColumnMeta(column.name, props.query, props.context) - return { - rule: n, - result: res.result, - } + return { + ...columnMeta, + title: settings?.display?.label || title || column.name, + render: (_, data, recordIndex: number) => { + return renderColumn(column.name, data[index].formattedValue, data, recordIndex, { + kind: NodeKind.DataTableNode, + source: props.query.source, }) + }, 
+ style: (_, data) => { + const cf = conditionalFormattingRules + .filter((n) => n.columnName === column.name) + .map((n) => { + const res = execHog(n.bytecode, { + globals: { + value: data[index].value, + input: convertTableValue(n.input, column.type.name), + }, + functions: {}, + maxAsyncSteps: 0, + }) + + return { + rule: n, + result: res.result, + } + }) - const conditionalFormattingMatches = cf.find((n) => Boolean(n.result)) + const conditionalFormattingMatches = cf.find((n) => Boolean(n.result)) - if (conditionalFormattingMatches) { - const ruleColor = conditionalFormattingMatches.rule.color - const colorMode = conditionalFormattingMatches.rule.colorMode ?? 'light' + if (conditionalFormattingMatches) { + const ruleColor = conditionalFormattingMatches.rule.color + const colorMode = conditionalFormattingMatches.rule.colorMode ?? 'light' - // If the color mode matches the current theme, return as it was saved - if ((colorMode === 'dark' && isDarkModeOn) || (colorMode === 'light' && !isDarkModeOn)) { - return { - backgroundColor: ruleColor, + // If the color mode matches the current theme, return as it was saved + if ((colorMode === 'dark' && isDarkModeOn) || (colorMode === 'light' && !isDarkModeOn)) { + return { + backgroundColor: ruleColor, + } } - } - // If the color mode is dark, but we're in light mode - then lighten the color - if (colorMode === 'dark' && !isDarkModeOn) { - return { - backgroundColor: lightenDarkenColor(ruleColor, 30), + // If the color mode is dark, but we're in light mode - then lighten the color + if (colorMode === 'dark' && !isDarkModeOn) { + return { + backgroundColor: lightenDarkenColor(ruleColor, 30), + } } - } - // If the color mode is light, but we're in dark mode - then darken the color - return { - backgroundColor: lightenDarkenColor(ruleColor, -30), + // If the color mode is light, but we're in dark mode - then darken the color + return { + backgroundColor: lightenDarkenColor(ruleColor, -30), + } } - } - return undefined - }, - 
}) + return undefined + }, + } + } ) return ( diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx index b80618f2e823f..a4bbed9d1d3e7 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/AddVariableButton.tsx @@ -5,14 +5,14 @@ import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { dataVisualizationLogic } from '../../dataVisualizationLogic' -import { addVariableLogic } from './addVariableLogic' import { NewVariableModal } from './NewVariableModal' +import { variableModalLogic } from './variableModalLogic' import { variablesLogic } from './variablesLogic' export const AddVariableButton = (): JSX.Element => { const { showEditingUI } = useValues(dataVisualizationLogic) const { featureFlags } = useValues(featureFlagLogic) - const { openModal } = useActions(addVariableLogic) + const { openNewVariableModal } = useActions(variableModalLogic) const { variables, variablesLoading } = useValues(variablesLogic) const { addVariable } = useActions(variablesLogic) @@ -30,19 +30,19 @@ export const AddVariableButton = (): JSX.Element => { items: [ { label: 'String', - onClick: () => openModal('String'), + onClick: () => openNewVariableModal('String'), }, { label: 'Number', - onClick: () => openModal('Number'), + onClick: () => openNewVariableModal('Number'), }, { label: 'Boolean', - onClick: () => openModal('Boolean'), + onClick: () => openNewVariableModal('Boolean'), }, { label: 'List', - onClick: () => openModal('List'), + onClick: () => openNewVariableModal('List'), }, ], }, @@ -57,7 +57,7 @@ export const AddVariableButton = (): JSX.Element => { ] : variables.map((n) => ({ label: n.name, - onClick: () => addVariable({ variableId: n.id, code_name: '' }), + onClick: 
() => addVariable({ variableId: n.id, code_name: n.code_name }), })), }, ]} diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx b/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx index b7386fd745d5a..95c0d66a1c3d9 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/NewVariableModal.tsx @@ -10,7 +10,7 @@ import { useActions, useValues } from 'kea' import { LemonField } from 'lib/lemon-ui/LemonField' import { Variable } from '../../types' -import { addVariableLogic } from './addVariableLogic' +import { variableModalLogic } from './variableModalLogic' const renderVariableSpecificFields = ( variable: Variable, @@ -95,12 +95,14 @@ const renderVariableSpecificFields = ( } export const NewVariableModal = (): JSX.Element => { - const { closeModal, updateVariable, save } = useActions(addVariableLogic) - const { isModalOpen, variable } = useValues(addVariableLogic) + const { closeModal, updateVariable, save } = useActions(variableModalLogic) + const { isModalOpen, variable, modalType } = useValues(variableModalLogic) + + const title = modalType === 'new' ? 
`New ${variable.type} variable` : `Editing ${variable.name}` return ( { @@ -49,6 +50,7 @@ export const VariablesForInsight = (): JSX.Element => { const { updateVariableValue, removeVariable } = useActions(variablesLogic) const { showEditingUI } = useValues(dataVisualizationLogic) const { variableOverridesAreSet } = useValues(dataNodeLogic) + const { openExistingVariableModal } = useActions(variableModalLogic) if (!featureFlags[FEATURE_FLAGS.INSIGHT_VARIABLES] || !variablesForInsight.length || !showVariablesBar) { return <> @@ -65,6 +67,7 @@ export const VariablesForInsight = (): JSX.Element => { onChange={updateVariableValue} onRemove={removeVariable} variableOverridesAreSet={variableOverridesAreSet} + variableSettingsOnClick={() => openExistingVariableModal(n)} /> ))}
@@ -79,6 +82,7 @@ interface VariableInputProps { closePopover: () => void onChange: (variableId: string, value: any) => void onRemove?: (variableId: string) => void + variableSettingsOnClick?: () => void } const VariableInput = ({ @@ -87,8 +91,17 @@ const VariableInput = ({ closePopover, onChange, onRemove, + variableSettingsOnClick, }: VariableInputProps): JSX.Element => { - const [localInputValue, setLocalInputValue] = useState(variable.value ?? variable.default_value ?? '') + const [localInputValue, setLocalInputValue] = useState(() => { + const val = variable.value ?? variable.default_value + + if (variable.type === 'Number' && !val) { + return 0 + } + + return val ?? '' + }) const inputRef = useRef(null) const codeRef = useRef(null) @@ -102,17 +115,58 @@ const VariableInput = ({ return (
- setLocalInputValue(value)} - onPressEnter={() => { - onChange(variable.id, localInputValue) - closePopover() - }} - /> + {variable.type === 'String' && ( + setLocalInputValue(value)} + onPressEnter={() => { + onChange(variable.id, localInputValue) + closePopover() + }} + /> + )} + {variable.type === 'Number' && ( + setLocalInputValue(value ?? 0)} + onPressEnter={() => { + onChange(variable.id, localInputValue) + closePopover() + }} + /> + )} + {variable.type === 'Boolean' && ( + setLocalInputValue(value === 'true')} + options={[ + { + value: 'true', + label: 'true', + }, + { + value: 'false', + label: 'false', + }, + ]} + /> + )} + {variable.type === 'List' && ( + setLocalInputValue(value)} + options={variable.values.map((n) => ({ label: n, value: n }))} + /> + )} { @@ -142,7 +196,7 @@ const VariableInput = ({ } } }} - className="text-xs flex flex-1 items-center" + className="text-xs flex flex-1 items-center mr-2" > {variableAsHogQL} @@ -160,7 +214,14 @@ const VariableInput = ({ tooltip="Remove variable from insight" /> )} - } size="xsmall" tooltip="Open variable settings" /> + {variableSettingsOnClick && ( + } + size="xsmall" + tooltip="Open variable settings" + /> + )}
)} @@ -174,6 +235,7 @@ interface VariableComponentProps { onChange: (variableId: string, value: any) => void variableOverridesAreSet: boolean onRemove?: (variableId: string) => void + variableSettingsOnClick?: () => void } const VariableComponent = ({ @@ -182,9 +244,21 @@ const VariableComponent = ({ onChange, variableOverridesAreSet, onRemove, + variableSettingsOnClick, }: VariableComponentProps): JSX.Element => { const [isPopoverOpen, setPopoverOpen] = useState(false) + // Dont show the popover overlay for list variables not in edit mode + if (!showEditingUI && variable.type === 'List') { + return ( + onChange(variable.id, value)} + options={variable.values.map((n) => ({ label: n, value: n }))} + /> + ) + } + return ( setPopoverOpen(false)} onRemove={onRemove} + variableSettingsOnClick={() => { + if (variableSettingsOnClick) { + setPopoverOpen(false) + variableSettingsOnClick() + } + }} /> } visible={isPopoverOpen} @@ -213,7 +293,7 @@ const VariableComponent = ({ onClick={() => setPopoverOpen(!isPopoverOpen)} disabledReason={variableOverridesAreSet && 'Discard dashboard variables to change'} > - {variable.value ?? variable.default_value} + {variable.value?.toString() ?? variable.default_value?.toString()}
diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts similarity index 72% rename from frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts rename to frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts index a8802e6b6b6ea..641e991250537 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/addVariableLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variableModalLogic.ts @@ -3,8 +3,8 @@ import { actions, connect, kea, key, listeners, path, props, reducers } from 'ke import api, { ApiError } from 'lib/api' import { BooleanVariable, ListVariable, NumberVariable, StringVariable, Variable, VariableType } from '../../types' -import type { addVariableLogicType } from './addVariableLogicType' import { variableDataLogic } from './variableDataLogic' +import type { variableModalLogicType } from './variableModalLogicType' import { variablesLogic } from './variablesLogic' const DEFAULT_VARIABLE: StringVariable = { @@ -19,7 +19,7 @@ export interface AddVariableLogicProps { key: string } -export const addVariableLogic = kea([ +export const variableModalLogic = kea([ path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variableLogic']), props({ key: '' } as AddVariableLogicProps), key((props) => props.key), @@ -27,29 +27,40 @@ export const addVariableLogic = kea([ actions: [variableDataLogic, ['getVariables'], variablesLogic, ['addVariable']], }), actions({ - openModal: (variableType: VariableType) => ({ variableType }), + openNewVariableModal: (variableType: VariableType) => ({ variableType }), + openExistingVariableModal: (variable: Variable) => ({ variable }), closeModal: true, updateVariable: (variable: Variable) => ({ variable }), save: true, }), reducers({ + modalType: [ + 'new' as 'new' | 
'existing', + { + openNewVariableModal: () => 'new', + openExistingVariableModal: () => 'existing', + }, + ], variableType: [ 'string' as VariableType, { - openModal: (_, { variableType }) => variableType, + openNewVariableModal: (_, { variableType }) => variableType, + openExistingVariableModal: (_, { variable }) => variable.type, }, ], isModalOpen: [ false as boolean, { - openModal: () => true, + openNewVariableModal: () => true, + openExistingVariableModal: () => true, closeModal: () => false, }, ], variable: [ DEFAULT_VARIABLE as Variable, { - openModal: (_, { variableType }) => { + openExistingVariableModal: (_, { variable }) => ({ ...variable }), + openNewVariableModal: (_, { variableType }) => { if (variableType === 'String') { return { id: '', @@ -101,10 +112,14 @@ export const addVariableLogic = kea([ listeners(({ values, actions }) => ({ save: async () => { try { - const variable = await api.insightVariables.create(values.variable) + if (values.modalType === 'new') { + const variable = await api.insightVariables.create(values.variable) + actions.addVariable({ variableId: variable.id, code_name: variable.code_name }) + } else { + await api.insightVariables.update(values.variable.id, values.variable) + } actions.getVariables() - actions.addVariable({ variableId: variable.id, code_name: variable.code_name }) actions.closeModal() } catch (e: any) { const error = e as ApiError diff --git a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts index 61f2590242a73..937c027a0a104 100644 --- a/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts +++ b/frontend/src/queries/nodes/DataVisualization/Components/Variables/variablesLogic.ts @@ -7,7 +7,7 @@ import { getVariablesFromQuery, haveVariablesOrFiltersChanged } from 'scenes/ins import { DataVisualizationNode, HogQLVariable } from '~/queries/schema' import { 
dataVisualizationLogic } from '../../dataVisualizationLogic' -import { Variable } from '../../types' +import { Variable, VariableType } from '../../types' import { variableDataLogic } from './variableDataLogic' import type { variablesLogicType } from './variablesLogicType' @@ -17,6 +17,18 @@ export interface VariablesLogicProps { readOnly: boolean } +const convertValueToCorrectType = (value: string, type: VariableType): number | string | boolean => { + if (type === 'Number') { + return Number(value) + } + + if (type === 'Boolean' && typeof value === 'string') { + return value.toLowerCase() === 'true' + } + + return value +} + export const variablesLogic = kea([ path(['queries', 'nodes', 'DataVisualization', 'Components', 'Variables', 'variablesLogic']), props({ key: '' } as VariablesLogicProps), @@ -32,15 +44,18 @@ export const variablesLogic = kea([ ['featureFlags'], ], }), - actions({ + actions(({ values }) => ({ addVariable: (variable: HogQLVariable) => ({ variable }), addVariables: (variables: HogQLVariable[]) => ({ variables }), removeVariable: (variableId: string) => ({ variableId }), - updateVariableValue: (variableId: string, value: any) => ({ variableId, value }), + updateVariableValue: (variableId: string, value: any) => ({ + variableId, + value, + allVariables: values.variables, + }), setEditorQuery: (query: string) => ({ query }), - resetVariables: true, updateSourceQuery: true, - }), + })), reducers({ internalSelectedVariables: [ [] as HogQLVariable[], @@ -52,17 +67,20 @@ export const variablesLogic = kea([ return [...state, { ...variable }] }, - addVariables: (state, { variables }) => { - return [...state, ...variables.map((n) => ({ ...n }))] + addVariables: (_state, { variables }) => { + return [...variables.map((n) => ({ ...n }))] }, - updateVariableValue: (state, { variableId, value }) => { + updateVariableValue: (state, { variableId, value, allVariables }) => { const variableIndex = state.findIndex((n) => n.variableId === variableId) if 
(variableIndex < 0) { return state } + const variableType = allVariables.find((n) => n.id === variableId)?.type + const valueWithType = convertValueToCorrectType(value, variableType ?? 'String') + const variablesInState = [...state] - variablesInState[variableIndex] = { ...variablesInState[variableIndex], value } + variablesInState[variableIndex] = { ...variablesInState[variableIndex], value: valueWithType } return variablesInState }, @@ -70,12 +88,11 @@ export const variablesLogic = kea([ const stateCopy = [...state] const index = stateCopy.findIndex((n) => n.variableId === variableId) if (index >= 0) { - stateCopy.splice(index) + stateCopy.splice(index, 1) } return stateCopy }, - resetVariables: () => [], }, ], editorQuery: [ @@ -88,9 +105,9 @@ export const variablesLogic = kea([ }), selectors({ variablesForInsight: [ - (s) => [s.variables, s.internalSelectedVariables, s.variablesLoading], - (variables, internalSelectedVariables, variablesLoading): Variable[] => { - if (!variables.length || !internalSelectedVariables.length || variablesLoading) { + (s) => [s.variables, s.internalSelectedVariables], + (variables, internalSelectedVariables): Variable[] => { + if (!variables.length || !internalSelectedVariables.length) { return [] } @@ -128,16 +145,16 @@ export const variablesLogic = kea([ return } - const variables = values.variablesForInsight + const variables = values.internalSelectedVariables const query: DataVisualizationNode = { ...values.query, source: { ...values.query.source, variables: variables.reduce((acc, cur) => { - if (cur.id) { - acc[cur.id] = { - variableId: cur.id, + if (cur.variableId) { + acc[cur.variableId] = { + variableId: cur.variableId, value: cur.value, code_name: cur.code_name, } @@ -153,12 +170,11 @@ export const variablesLogic = kea([ return } + actions.setQuery(query) + if (props.readOnly) { // Refresh the data manaully via dataNodeLogic when in insight view mode actions.loadData(true, undefined, query.source) - } else { - // Update the 
query source when in edit mode - actions.setQuery(query) } }, })), @@ -176,7 +192,7 @@ export const variablesLogic = kea([ return } - const variableAlreadySelected = values.variablesForInsight.find((n) => n.code_name === match) + const variableAlreadySelected = values.internalSelectedVariables.find((n) => n.code_name === match) if (!variableAlreadySelected) { actions.addVariable({ variableId: variableExists.id, code_name: variableExists.code_name }) } @@ -187,8 +203,6 @@ export const variablesLogic = kea([ return } - actions.resetVariables() - const variables = Object.values(query.source.variables ?? {}) if (variables.length) { diff --git a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx index 799a135e5a649..9a021d962b0f9 100644 --- a/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx +++ b/frontend/src/queries/nodes/DataVisualization/DataVisualization.tsx @@ -39,7 +39,7 @@ import { SideBar } from './Components/SideBar' import { Table } from './Components/Table' import { TableDisplay } from './Components/TableDisplay' import { AddVariableButton } from './Components/Variables/AddVariableButton' -import { addVariableLogic } from './Components/Variables/addVariableLogic' +import { variableModalLogic } from './Components/Variables/variableModalLogic' import { VariablesForInsight } from './Components/Variables/Variables' import { variablesLogic } from './Components/Variables/variablesLogic' import { dataVisualizationLogic, DataVisualizationLogicProps } from './dataVisualizationLogic' @@ -48,7 +48,7 @@ import { displayLogic } from './displayLogic' interface DataTableVisualizationProps { uniqueKey?: string | number query: DataVisualizationNode - setQuery?: (query: DataVisualizationNode) => void + setQuery: (query: DataVisualizationNode) => void context?: QueryContext /* Cached Results are provided when shared or exported, the data node logic becomes read only implicitly 
*/ @@ -104,7 +104,7 @@ export function DataTableVisualization({ logic={variablesLogic} props={{ key: dataVisualizationLogicProps.key, readOnly: readOnly ?? false }} > - + { }, ]) }) + + it('handles negation for cohorts', () => { + let properties: any = [ + { + key: 'id', + type: 'cohort', + value: 1, + operator: 'exact', + negation: false, + }, + ] + let result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'exact' }]) + + properties = [{ key: 'id', type: 'cohort', value: 1, operator: 'exact', negation: true }] + result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'not_in' }]) + + properties = [{ key: 'id', type: 'cohort', value: 1, negation: true }] + result = cleanEntityProperties(properties) + expect(result).toEqual([{ key: 'id', type: 'cohort', value: 1, operator: 'not_in' }]) + }) }) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts index 4474b6423dbf6..82edbfb56cfb9 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/cleanProperties.ts @@ -142,6 +142,14 @@ const cleanProperty = (property: Record): AnyPropertyFilter => { delete property['operator'] } + // convert `negation` for cohorts + if (property['type'] === 'cohort' && property['negation'] !== undefined) { + if (property['operator'] === PropertyOperator.Exact && property['negation']) { + property['operator'] = PropertyOperator.NotIn + } + delete property['negation'] + } + // remove none from values if (Array.isArray(property['value'])) { property['value'] = property['value'].filter((x) => x !== null) diff --git a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts index 742320e8197eb..c7ccb75fb6c61 100644 --- 
a/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/filtersToQueryNode.ts @@ -138,6 +138,7 @@ export const legacyEntityToNode = ( ...shared, math: entity.math || 'total', math_property: entity.math_property, + math_property_type: entity.math_property_type, math_hogql: entity.math_hogql, math_group_type_index: entity.math_group_type_index, } as any @@ -392,6 +393,7 @@ export const trendsFilterToQuery = (filters: Partial): TrendsF return objectCleanWithEmpty({ smoothingIntervals: filters.smoothing_intervals, showLegend: filters.show_legend, + showAlertThresholdLines: filters.show_alert_threshold_lines, hiddenLegendIndexes: hiddenLegendKeysToIndexes(filters.hidden_legend_keys), aggregationAxisFormat: filters.aggregation_axis_format, aggregationAxisPrefix: filters.aggregation_axis_prefix, diff --git a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts index 9a8f36c6cb548..89e0a6f3d8e7b 100644 --- a/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts +++ b/frontend/src/queries/nodes/InsightQuery/utils/queryNodeToFilter.ts @@ -1,3 +1,4 @@ +import { TaxonomicFilterGroupType } from 'lib/components/TaxonomicFilter/types' import { objectClean } from 'lib/utils' import { @@ -53,6 +54,7 @@ export const seriesNodeToFilter = ( // TODO: math is not supported by funnel and lifecycle queries math: node.math, math_property: node.math_property, + math_property_type: node.math_property_type as TaxonomicFilterGroupType, math_hogql: node.math_hogql, math_group_type_index: node.math_group_type_index, properties: node.properties as any, // TODO, diff --git a/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx b/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx index 1e72d999b1dfe..d75685def2228 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx +++ 
b/frontend/src/queries/nodes/InsightViz/InsightDisplayConfig.tsx @@ -14,6 +14,7 @@ import { funnelDataLogic } from 'scenes/funnels/funnelDataLogic' import { axisLabel } from 'scenes/insights/aggregationAxisFormat' import { PercentStackViewFilter } from 'scenes/insights/EditorFilters/PercentStackViewFilter' import { ScalePicker } from 'scenes/insights/EditorFilters/ScalePicker' +import { ShowAlertThresholdLinesFilter } from 'scenes/insights/EditorFilters/ShowAlertThresholdLinesFilter' import { ShowLegendFilter } from 'scenes/insights/EditorFilters/ShowLegendFilter' import { ValueOnSeriesFilter } from 'scenes/insights/EditorFilters/ValueOnSeriesFilter' import { InsightDateFilter } from 'scenes/insights/filters/InsightDateFilter' @@ -77,6 +78,7 @@ export function InsightDisplayConfig(): JSX.Element { ...(supportsValueOnSeries ? [{ label: () => }] : []), ...(supportsPercentStackView ? [{ label: () => }] : []), ...(hasLegend ? [{ label: () => }] : []), + { label: () => }, ], }, ] diff --git a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx index ba58d4a16b5af..5e0e7be45a936 100644 --- a/frontend/src/queries/nodes/InsightViz/InsightViz.tsx +++ b/frontend/src/queries/nodes/InsightViz/InsightViz.tsx @@ -32,7 +32,7 @@ export const insightVizDataCollectionId = (props: InsightLogicProps | undef type InsightVizProps = { uniqueKey?: string | number query: InsightVizNode - setQuery?: (node: InsightVizNode) => void + setQuery: (node: InsightVizNode) => void context?: QueryContext readOnly?: boolean embedded?: boolean diff --git a/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx b/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx index 9135c32cf5156..f67da5034e64f 100644 --- a/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx +++ b/frontend/src/queries/nodes/SavedInsight/SavedInsight.tsx @@ -12,9 +12,11 @@ import { InsightLogicProps } from '~/types' interface InsightProps { query: SavedInsightNode 
context?: QueryContext + embedded?: boolean + readOnly?: boolean } -export function SavedInsight({ query: propsQuery, context }: InsightProps): JSX.Element { +export function SavedInsight({ query: propsQuery, context, embedded, readOnly }: InsightProps): JSX.Element { const insightProps: InsightLogicProps = { dashboardItemId: propsQuery.shortId } const { insight, insightLoading } = useValues(insightLogic(insightProps)) const { query: dataQuery } = useValues(insightDataLogic(insightProps)) @@ -29,5 +31,13 @@ export function SavedInsight({ query: propsQuery, context }: InsightProps): JSX. const query = { ...propsQuery, ...dataQuery, full: propsQuery.full } - return + return ( + + ) } diff --git a/frontend/src/queries/query.ts b/frontend/src/queries/query.ts index 1952432e3607f..2a0af2943316e 100644 --- a/frontend/src/queries/query.ts +++ b/frontend/src/queries/query.ts @@ -15,6 +15,7 @@ import { NodeKind, PersonsNode, QueryStatus, + RefreshType, } from './schema' import { isAsyncResponse, @@ -101,12 +102,13 @@ async function executeQuery( !!featureFlagLogic.findMounted()?.values.featureFlags?.[FEATURE_FLAGS.QUERY_ASYNC] if (!pollOnly) { + const refreshParam: RefreshType | undefined = + refresh && isAsyncQuery ? 'force_async' : isAsyncQuery ? 
'async' : refresh const response = await api.query( queryNode, methodOptions, queryId, - refresh, - isAsyncQuery, + refreshParam, filtersOverride, variablesOverride ) diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json index a9dc7be1bf476..75b26c88bc4de 100644 --- a/frontend/src/queries/schema.json +++ b/frontend/src/queries/schema.json @@ -31,6 +31,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -84,6 +87,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -118,9 +124,6 @@ { "$ref": "#/definitions/SessionPropertyFilter" }, - { - "$ref": "#/definitions/CohortPropertyFilter" - }, { "$ref": "#/definitions/GroupPropertyFilter" }, @@ -172,6 +175,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -255,7 +261,20 @@ }, "sample_values": { "items": { - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "boolean" + }, + { + "type": "integer" + } + ] }, "type": "array" } @@ -598,6 +617,24 @@ } ] }, + "AssistantMessage": { + "additionalProperties": false, + "properties": { + "content": { + "type": "string" + }, + "type": { + "const": "ai", + "type": "string" + } + }, + "required": ["type", "content"], + "type": "object" + }, + "AssistantMessageType": { + "enum": ["human", "ai", "ai/viz"], + "type": "string" + }, "AutocompleteCompletionItem": { "additionalProperties": false, "properties": { @@ -3986,6 +4023,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -4503,6 +4543,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -4935,6 +4978,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, 
"name": { "type": "string" }, @@ -5641,6 +5687,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -5701,6 +5750,9 @@ "math_property": { "type": "string" }, + "math_property_type": { + "type": "string" + }, "name": { "type": "string" }, @@ -6696,6 +6748,20 @@ "required": ["results"], "type": "object" }, + "HumanMessage": { + "additionalProperties": false, + "properties": { + "content": { + "type": "string" + }, + "type": { + "const": "human", + "type": "string" + } + }, + "required": ["type", "content"], + "type": "object" + }, "InsightActorsQuery": { "additionalProperties": false, "properties": { @@ -9932,6 +9998,105 @@ }, "required": ["questions"], "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "results": { + "$ref": "#/definitions/TeamTaxonomyResponse" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. 
Throws an error otherwise.", + "type": "string" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." + }, + "results": { + "$ref": "#/definitions/EventTaxonomyResponse" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["results"], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "error": { + "description": "Query error. Returned only if 'explain' or `modifiers.debug` is true. Throws an error otherwise.", + "type": "string" + }, + "hogql": { + "description": "Generated HogQL query.", + "type": "string" + }, + "modifiers": { + "$ref": "#/definitions/HogQLQueryModifiers", + "description": "Modifiers used when performing the query" + }, + "query_status": { + "$ref": "#/definitions/QueryStatus", + "description": "Query status indicates whether next to the provided data, a query is still running." 
+ }, + "results": { + "$ref": "#/definitions/ActorsPropertyTaxonomyResponse" + }, + "timings": { + "description": "Measured timings for different parts of the query generation process", + "items": { + "$ref": "#/definitions/QueryTiming" + }, + "type": "array" + } + }, + "required": ["results"], + "type": "object" } ] }, @@ -10044,6 +10209,15 @@ }, { "$ref": "#/definitions/SuggestedQuestionsQuery" + }, + { + "$ref": "#/definitions/TeamTaxonomyQuery" + }, + { + "$ref": "#/definitions/EventTaxonomyQuery" + }, + { + "$ref": "#/definitions/ActorsPropertyTaxonomyQuery" } ], "required": ["kind"], @@ -10148,6 +10322,21 @@ "required": ["k", "t"], "type": "object" }, + "RecordingOrder": { + "enum": [ + "duration", + "recording_duration", + "inactive_seconds", + "active_seconds", + "start_time", + "console_error_count", + "click_count", + "keypress_count", + "mouse_activity_count", + "activity_score" + ], + "type": "string" + }, "RecordingPropertyFilter": { "additionalProperties": false, "properties": { @@ -10237,31 +10426,7 @@ "$ref": "#/definitions/FilterLogicalOperator" }, "order": { - "anyOf": [ - { - "$ref": "#/definitions/DurationType" - }, - { - "const": "start_time", - "type": "string" - }, - { - "const": "console_error_count", - "type": "string" - }, - { - "const": "click_count", - "type": "string" - }, - { - "const": "keypress_count", - "type": "string" - }, - { - "const": "mouse_activity_count", - "type": "string" - } - ] + "$ref": "#/definitions/RecordingOrder" }, "person_uuid": { "type": "string" @@ -10285,7 +10450,7 @@ "type": "object" } }, - "required": ["kind", "order"], + "required": ["kind"], "type": "object" }, "RecordingsQueryResponse": { @@ -10313,6 +10478,10 @@ "const": "async", "type": "string" }, + { + "const": "async_except_on_cache_miss", + "type": "string" + }, { "const": "blocking", "type": "string" @@ -10556,6 +10725,19 @@ "required": ["count"], "type": "object" }, + "RootAssistantMessage": { + "anyOf": [ + { + "$ref": 
"#/definitions/VisualizationMessage" + }, + { + "$ref": "#/definitions/AssistantMessage" + }, + { + "$ref": "#/definitions/HumanMessage" + } + ] + }, "SamplingRate": { "additionalProperties": false, "properties": { @@ -10833,6 +11015,10 @@ "active_seconds": { "type": "number" }, + "activity_score": { + "description": "calculated on the backend so that we can sort by it, definition may change over time", + "type": "number" + }, "click_count": { "type": "number" }, @@ -11468,6 +11654,10 @@ }, "type": "array" }, + "showAlertThresholdLines": { + "default": false, + "type": "boolean" + }, "showLabelsOnSeries": { "type": "boolean" }, @@ -11538,6 +11728,9 @@ }, "type": "object" }, + "show_alert_threshold_lines": { + "type": "boolean" + }, "show_labels_on_series": { "type": "boolean" }, @@ -11674,6 +11867,36 @@ "required": ["results"], "type": "object" }, + "VisualizationMessage": { + "additionalProperties": false, + "properties": { + "answer": { + "$ref": "#/definitions/ExperimentalAITrendsQuery" + }, + "plan": { + "type": "string" + }, + "reasoning_steps": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ] + }, + "type": { + "const": "ai/viz", + "type": "string" + } + }, + "required": ["type"], + "type": "object" + }, "VizSpecificOptions": { "additionalProperties": false, "description": "Chart specific rendering options. Use ChartRenderingMetadata for non-serializable values, e.g. 
onClick handlers", diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts index 1887f57ee0f96..273605a42f6d7 100644 --- a/frontend/src/queries/schema.ts +++ b/frontend/src/queries/schema.ts @@ -9,9 +9,7 @@ import { BreakdownType, ChartDisplayCategory, ChartDisplayType, - CohortPropertyFilter, CountPerActorMathType, - DurationType, EventPropertyFilter, EventType, FeaturePropertyFilter, @@ -184,6 +182,9 @@ export type QuerySchema = // AI | SuggestedQuestionsQuery + | TeamTaxonomyQuery + | EventTaxonomyQuery + | ActorsPropertyTaxonomyQuery // Keep this, because QuerySchema itself will be collapsed as it is used in other models export type QuerySchemaRoot = QuerySchema @@ -311,6 +312,18 @@ export interface RecordingsQueryResponse { has_next: boolean } +export type RecordingOrder = + | 'duration' + | 'recording_duration' + | 'inactive_seconds' + | 'active_seconds' + | 'start_time' + | 'console_error_count' + | 'click_count' + | 'keypress_count' + | 'mouse_activity_count' + | 'activity_score' + export interface RecordingsQuery extends DataNode { kind: NodeKind.RecordingsQuery date_from?: string | null @@ -324,13 +337,7 @@ export interface RecordingsQuery extends DataNode { operand?: FilterLogicalOperator session_ids?: string[] person_uuid?: string - order: - | DurationType - | 'start_time' - | 'console_error_count' - | 'click_count' - | 'keypress_count' - | 'mouse_activity_count' + order?: RecordingOrder limit?: integer offset?: integer user_modified_filters?: Record @@ -479,6 +486,7 @@ export interface EntityNode extends Node { custom_name?: string math?: MathType math_property?: string + math_property_type?: string math_hogql?: string math_group_type_index?: 0 | 1 | 2 | 3 | 4 /** Properties configurable in the interface */ @@ -834,6 +842,8 @@ export type TrendsFilter = { display?: TrendsFilterLegacy['display'] /** @default false */ showLegend?: TrendsFilterLegacy['show_legend'] + /** @default false */ + showAlertThresholdLines?: boolean 
breakdown_histogram_bin_count?: TrendsFilterLegacy['breakdown_histogram_bin_count'] // TODO: fully move into BreakdownFilter /** @default numeric */ aggregationAxisFormat?: TrendsFilterLegacy['aggregation_axis_format'] @@ -894,15 +904,7 @@ export interface TrendsQuery extends InsightsQueryBase { export type AIPropertyFilter = | EventPropertyFilter | PersonPropertyFilter - // | ElementPropertyFilter | SessionPropertyFilter - | CohortPropertyFilter - // | RecordingPropertyFilter - // | LogEntryPropertyFilter - // | HogQLPropertyFilter - // | EmptyPropertyFilter - // | DataWarehousePropertyFilter - // | DataWarehousePersonPropertyFilter | GroupPropertyFilter | FeaturePropertyFilter @@ -1180,6 +1182,7 @@ export type LifecycleFilter = { export type RefreshType = | boolean | 'async' + | 'async_except_on_cache_miss' | 'blocking' | 'force_async' | 'force_blocking' @@ -2073,7 +2076,9 @@ export type EventTaxonomyQueryResponse = AnalyticsQueryResponseBase export interface ActorsPropertyTaxonomyResponse { - sample_values: string[] + // Values can be floats and integers. The comment below is to preserve the `integer` type. 
+ // eslint-disable-next-line @typescript-eslint/no-duplicate-type-constituents + sample_values: (string | number | boolean | integer)[] sample_count: integer } @@ -2086,3 +2091,28 @@ export interface ActorsPropertyTaxonomyQuery extends DataNode export type CachedActorsPropertyTaxonomyQueryResponse = CachedQueryResponse + +export enum AssistantMessageType { + Human = 'human', + Assistant = 'ai', + Visualization = 'ai/viz', +} + +export interface HumanMessage { + type: AssistantMessageType.Human + content: string +} + +export interface AssistantMessage { + type: AssistantMessageType.Assistant + content: string +} + +export interface VisualizationMessage { + type: AssistantMessageType.Visualization + plan?: string + reasoning_steps?: string[] | null + answer?: ExperimentalAITrendsQuery +} + +export type RootAssistantMessage = VisualizationMessage | AssistantMessage | HumanMessage diff --git a/frontend/src/queries/types.ts b/frontend/src/queries/types.ts index c0becc7b8a6a4..afbae27286816 100644 --- a/frontend/src/queries/types.ts +++ b/frontend/src/queries/types.ts @@ -1,7 +1,7 @@ import { ComponentType, HTMLProps } from 'react' import { QueryFeature } from '~/queries/nodes/DataTable/queryFeatures' -import { DataTableNode, InsightVizNode } from '~/queries/schema' +import { DataTableNode, DataVisualizationNode, InsightVizNode } from '~/queries/schema' import { ChartDisplayType, GraphPointPayload, InsightLogicProps, TrendResult } from '~/types' /** Pass custom metadata to queries. Used for e.g. custom columns in the DataTable. 
*/ @@ -37,12 +37,12 @@ export interface ChartRenderingMetadata { export type QueryContextColumnTitleComponent = ComponentType<{ columnName: string - query: DataTableNode + query: DataTableNode | DataVisualizationNode }> export type QueryContextColumnComponent = ComponentType<{ columnName: string - query: DataTableNode + query: DataTableNode | DataVisualizationNode record: unknown recordIndex: number value: unknown diff --git a/frontend/src/queries/utils.ts b/frontend/src/queries/utils.ts index ed9cfc8d2fcf1..f2828675a643d 100644 --- a/frontend/src/queries/utils.ts +++ b/frontend/src/queries/utils.ts @@ -301,6 +301,13 @@ export const getShowLegend = (query: InsightQueryNode): boolean | undefined => { return undefined } +export const getShowAlertThresholdLines = (query: InsightQueryNode): boolean | undefined => { + if (isTrendsQuery(query)) { + return query.trendsFilter?.showAlertThresholdLines + } + return undefined +} + export const getShowLabelsOnSeries = (query: InsightQueryNode): boolean | undefined => { if (isTrendsQuery(query)) { return query.trendsFilter?.showLabelsOnSeries diff --git a/frontend/src/scenes/actions/ActionHogFunctions.tsx b/frontend/src/scenes/actions/ActionHogFunctions.tsx index 056bdea24fd8e..6128a2c1d664c 100644 --- a/frontend/src/scenes/actions/ActionHogFunctions.tsx +++ b/frontend/src/scenes/actions/ActionHogFunctions.tsx @@ -35,6 +35,7 @@ export function ActionHogFunctions(): JSX.Element | null { ) : null} any> = { [Scene.Heatmaps]: () => import('./heatmaps/HeatmapsScene'), [Scene.SessionAttributionExplorer]: () => import('scenes/web-analytics/SessionAttributionExplorer/SessionAttributionExplorerScene'), + [Scene.MessagingProviders]: () => import('./messaging/Providers'), + [Scene.MessagingBroadcasts]: () => import('./messaging/Broadcasts'), } diff --git a/frontend/src/scenes/data-management/definition/DefinitionView.tsx b/frontend/src/scenes/data-management/definition/DefinitionView.tsx index 57432ffd91a8e..f1ce43e5bb0c9 100644 --- 
a/frontend/src/scenes/data-management/definition/DefinitionView.tsx +++ b/frontend/src/scenes/data-management/definition/DefinitionView.tsx @@ -203,6 +203,7 @@ export function DefinitionView(props: DefinitionLogicProps = {}): JSX.Element {

Get notified via Slack, webhooks or more whenever this event is captured.

} -const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConfig): JSX.Element => { +const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConfig, lastValue?: any): JSX.Element => { if (field.type === 'switch-group') { return ( {({ value, onChange }) => ( <> - + {value && ( - {field.fields.map((field) => sourceFieldToElement(field, sourceConfig))} + {field.fields.map((field) => + sourceFieldToElement(field, sourceConfig, lastValue?.[field.name]) + )} )} @@ -43,11 +47,21 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf > {({ value, onChange }) => ( <> - + {field.options .find((n) => n.value === (value ?? field.defaultValue)) - ?.fields?.map((field) => sourceFieldToElement(field, sourceConfig))} + ?.fields?.map((field) => + sourceFieldToElement(field, sourceConfig, lastValue?.[field.name]) + )} )} @@ -63,6 +77,7 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf data-attr={field.name} placeholder={field.placeholder} minRows={4} + defaultValue={lastValue} /> ) @@ -102,32 +117,33 @@ const sourceFieldToElement = (field: SourceFieldConfig, sourceConfig: SourceConf data-attr={field.name} placeholder={field.placeholder} type={field.type} + defaultValue={lastValue} /> ) } -export default function SourceForm({ sourceConfig }: SourceFormProps): JSX.Element { - const { source } = useValues(sourceWizardLogic) - const showSourceFields = SOURCE_DETAILS[sourceConfig.name].showSourceForm - ? SOURCE_DETAILS[sourceConfig.name].showSourceForm?.(source.payload) - : true - const showPrefix = SOURCE_DETAILS[sourceConfig.name].showPrefix - ? SOURCE_DETAILS[sourceConfig.name].showPrefix?.(source.payload) - : true +export default function SourceFormContainer(props: SourceFormProps): JSX.Element { + return ( +
+ + + ) +} +export function SourceFormComponent({ sourceConfig, showPrefix = true, jobInputs }: SourceFormProps): JSX.Element { return ( -
- {showSourceFields && ( - - {SOURCE_DETAILS[sourceConfig.name].fields.map((field) => sourceFieldToElement(field, sourceConfig))} - - )} +
+ + {SOURCE_DETAILS[sourceConfig.name].fields.map((field) => + sourceFieldToElement(field, sourceConfig, jobInputs?.[field.name]) + )} + {showPrefix && ( )} - +
) } diff --git a/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx b/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx index 52238ef21654c..8d4bafe96632a 100644 --- a/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx +++ b/frontend/src/scenes/data-warehouse/external/forms/SyncMethodForm.tsx @@ -10,7 +10,7 @@ const getIncrementalSyncSupported = ( if (!schema.incremental_available) { return { disabled: true, - disabledReason: "Incremental append replication isn't supported on this table", + disabledReason: "Incremental replication isn't supported on this table", } } @@ -109,23 +109,18 @@ export const SyncMethodForm = ({ label: (
-

Incremental append replication

+

Incremental replication

{!incrementalSyncSupported.disabled && ( Recommended )}

- When using incremental append replication, we'll store the max value of the below - field on each sync and only sync rows with greater or equal value on the next run. + When using incremental replication, we'll store the max value of the below field on + each sync and only sync rows with greater or equal value on the next run.

- You should pick a field that increments for each row, such as a{' '} - created_at timestamp. -

-

- This method will append all new rows to your existing table - this means duplicate - data can exist if the incremental field updates for updated rows (such as when using - an updated_at field) + You should pick a field that increments or updates each time the row is updated, + such as a updated_at timestamp.

{showRefreshMessage && (

- Note: Changing the sync type or incremental append replication field will trigger a full table - refresh + Note: Changing the sync type or incremental replication field will trigger a full table refresh

)}
@@ -185,7 +179,7 @@ export const SyncMethodForm = ({ (n) => n.field === incrementalFieldValue ) if (!fieldSelected) { - lemonToast.error('Selected field for incremental append replication not found') + lemonToast.error('Selected field for incremental replication not found') return } diff --git a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts index 9eeaa4c96f832..c0adf6907c65d 100644 --- a/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/dataWarehouseSettingsLogic.ts @@ -7,7 +7,7 @@ import posthog from 'posthog-js' import { databaseTableListLogic } from 'scenes/data-management/database/databaseTableListLogic' import { DatabaseSchemaDataWarehouseTable } from '~/queries/schema' -import { DataWarehouseSettingsTab, ExternalDataSourceSchema, ExternalDataStripeSource } from '~/types' +import { DataWarehouseSettingsTab, ExternalDataSource, ExternalDataSourceSchema } from '~/types' import type { dataWarehouseSettingsLogicType } from './dataWarehouseSettingsLogicType' @@ -31,9 +31,9 @@ export const dataWarehouseSettingsLogic = kea([ actions: [databaseTableListLogic, ['loadDatabase']], })), actions({ - deleteSource: (source: ExternalDataStripeSource) => ({ source }), - reloadSource: (source: ExternalDataStripeSource) => ({ source }), - sourceLoadingFinished: (source: ExternalDataStripeSource) => ({ source }), + deleteSource: (source: ExternalDataSource) => ({ source }), + reloadSource: (source: ExternalDataSource) => ({ source }), + sourceLoadingFinished: (source: ExternalDataSource) => ({ source }), schemaLoadingFinished: (schema: ExternalDataSourceSchema) => ({ schema }), abortAnyRunningQuery: true, deleteSelfManagedTable: (tableId: string) => ({ tableId }), @@ -41,7 +41,7 @@ export const dataWarehouseSettingsLogic = kea([ }), loaders(({ cache, actions, values }) => ({ dataWarehouseSources: [ - null 
as PaginatedResponse | null, + null as PaginatedResponse | null, { loadSources: async (_, breakpoint) => { await breakpoint(300) @@ -59,7 +59,7 @@ export const dataWarehouseSettingsLogic = kea([ return res }, - updateSource: async (source: ExternalDataStripeSource) => { + updateSource: async (source: ExternalDataSource) => { const updatedSource = await api.externalDataSources.update(source.id, source) return { ...values.dataWarehouseSources, @@ -77,7 +77,7 @@ export const dataWarehouseSettingsLogic = kea([ // Optimistic UI updates before sending updates to the backend const clonedSources = JSON.parse( JSON.stringify(values.dataWarehouseSources?.results ?? []) - ) as ExternalDataStripeSource[] + ) as ExternalDataSource[] const sourceIndex = clonedSources.findIndex((n) => n.schemas.find((m) => m.id === schema.id)) const schemaIndex = clonedSources[sourceIndex].schemas.findIndex((n) => n.id === schema.id) clonedSources[sourceIndex].schemas[schemaIndex] = schema @@ -166,7 +166,7 @@ export const dataWarehouseSettingsLogic = kea([ // Optimistic UI updates before sending updates to the backend const clonedSources = JSON.parse( JSON.stringify(values.dataWarehouseSources?.results ?? 
[]) - ) as ExternalDataStripeSource[] + ) as ExternalDataSource[] const sourceIndex = clonedSources.findIndex((n) => n.id === source.id) clonedSources[sourceIndex].status = 'Running' clonedSources[sourceIndex].schemas = clonedSources[sourceIndex].schemas.map((n) => { diff --git a/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx new file mode 100644 index 0000000000000..9ad92924dcabc --- /dev/null +++ b/frontend/src/scenes/data-warehouse/settings/source/SourceConfiguration.tsx @@ -0,0 +1,58 @@ +import { LemonBanner, LemonButton, LemonSkeleton } from '@posthog/lemon-ui' +import { BindLogic, useValues } from 'kea' +import { Form } from 'kea-forms' +import { SourceFormComponent, SourceFormProps } from 'scenes/data-warehouse/external/forms/SourceForm' + +import { dataWarehouseSourceSettingsLogic } from './dataWarehouseSourceSettingsLogic' + +interface SourceConfigurationProps { + id: string +} + +export const SourceConfiguration = ({ id }: SourceConfigurationProps): JSX.Element => { + const { sourceFieldConfig } = useValues(dataWarehouseSourceSettingsLogic({ id })) + return ( + + {sourceFieldConfig ? ( + + ) : ( + + )} + + ) +} + +interface UpdateSourceConnectionFormContainerProps extends SourceFormProps { + id: string +} + +function UpdateSourceConnectionFormContainer(props: UpdateSourceConnectionFormContainerProps): JSX.Element { + const { source, sourceLoading } = useValues(dataWarehouseSourceSettingsLogic({ id: props.id })) + + if (source?.source_type !== 'MSSQL' && source?.source_type !== 'MySQL' && source?.source_type !== 'Postgres') { + return ( + +

+ Only Postgres, MSSQL, and MySQL are configurable. Please delete and recreate your source if you need + to connect to a new source of the same type. +

+
+ ) + } + return ( +
+ +
+ + Save + +
+ + ) +} diff --git a/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts b/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts index d7f290b850114..3343eec78e0e0 100644 --- a/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts +++ b/frontend/src/scenes/data-warehouse/settings/source/dataWarehouseSourceSettingsLogic.ts @@ -1,10 +1,12 @@ import { lemonToast } from '@posthog/lemon-ui' -import { actions, afterMount, kea, key, listeners, path, props, reducers } from 'kea' +import { actions, afterMount, kea, key, listeners, path, props, reducers, selectors } from 'kea' +import { forms } from 'kea-forms' import { loaders } from 'kea-loaders' import api from 'lib/api' import posthog from 'posthog-js' +import { SOURCE_DETAILS } from 'scenes/data-warehouse/new/sourceWizardLogic' -import { ExternalDataJob, ExternalDataSourceSchema, ExternalDataStripeSource } from '~/types' +import { ExternalDataJob, ExternalDataSource, ExternalDataSourceSchema } from '~/types' import type { dataWarehouseSourceSettingsLogicType } from './dataWarehouseSourceSettingsLogicType' @@ -26,14 +28,14 @@ export const dataWarehouseSourceSettingsLogic = kea ({ source: [ - null as ExternalDataStripeSource | null, + null as ExternalDataSource | null, { loadSource: async () => { return await api.externalDataSources.get(values.sourceId) }, updateSchema: async (schema: ExternalDataSourceSchema) => { // Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.schemas[schemaIndex] = schema actions.loadSourceSuccess(clonedSource) @@ -47,6 +49,11 @@ export const dataWarehouseSourceSettingsLogic = kea { + const updatedSource = await 
api.externalDataSources.update(values.sourceId, source) + actions.loadSourceSuccess(updatedSource) + return updatedSource + }, }, ], jobs: [ @@ -94,7 +101,42 @@ export const dataWarehouseSourceSettingsLogic = kea [s.source], + (source) => { + if (!source) { + return null + } + return SOURCE_DETAILS[source.source_type] + }, + ], + }), + forms(({ values, actions }) => ({ + sourceConfig: { + defaults: {} as Record, + submit: async ({ payload = {} }) => { + const newJobInputs = { + ...values.source?.job_inputs, + ...payload, + } + try { + const updatedSource = await api.externalDataSources.update(values.sourceId, { + job_inputs: newJobInputs, + }) + actions.loadSourceSuccess(updatedSource) + lemonToast.success('Source updated') + } catch (e: any) { + if (e.message) { + lemonToast.error(e.message) + } else { + lemonToast.error('Cant update source at this time') + } + } + }, + }, + })), listeners(({ values, actions, cache }) => ({ loadSourceSuccess: () => { clearTimeout(cache.sourceRefreshTimeout) @@ -126,7 +168,7 @@ export const dataWarehouseSourceSettingsLogic = kea { // Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.status = 'Running' clonedSource.schemas[schemaIndex].status = 'Running' @@ -147,7 +189,7 @@ export const dataWarehouseSourceSettingsLogic = kea { // Optimistic UI updates before sending updates to the backend - const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataStripeSource + const clonedSource = JSON.parse(JSON.stringify(values.source)) as ExternalDataSource const schemaIndex = clonedSource.schemas.findIndex((n) => n.id === schema.id) clonedSource.status = 'Running' clonedSource.schemas[schemaIndex].status = 'Running' diff --git 
a/frontend/src/scenes/debug/HogDebug.tsx b/frontend/src/scenes/debug/HogDebug.tsx index c2339d759218f..7e4f280793882 100644 --- a/frontend/src/scenes/debug/HogDebug.tsx +++ b/frontend/src/scenes/debug/HogDebug.tsx @@ -100,7 +100,7 @@ export function HogQueryEditor(props: HogQueryEditorProps): JSX.Element { interface HogDebugProps { queryKey: string query: HogQuery - setQuery?: (query: HogQuery) => void + setQuery: (query: HogQuery) => void debug?: boolean } diff --git a/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx b/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx index 4bfe2200d9b59..fffe0a16abfdc 100644 --- a/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx +++ b/frontend/src/scenes/early-access-features/EarlyAccessFeature.tsx @@ -331,6 +331,7 @@ export function EarlyAccessFeature({ id }: { id?: string } = {}): JSX.Element {

Notifications

Get notified when people opt in or out of your feature.

diff --git a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx index 513da01f0234e..60f9bedd45b37 100644 --- a/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx +++ b/frontend/src/scenes/error-tracking/ErrorTrackingScene.tsx @@ -58,7 +58,6 @@ export function ErrorTrackingScene(): JSX.Element { }, showOpenEditorButton: false, insightProps: insightProps, - alwaysRefresh: true, } return ( diff --git a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts index 6db23821973ba..c1a847a8ab647 100644 --- a/frontend/src/scenes/error-tracking/errorTrackingLogic.ts +++ b/frontend/src/scenes/error-tracking/errorTrackingLogic.ts @@ -1,11 +1,10 @@ import type { LemonSegmentedButtonOption } from '@posthog/lemon-ui' import { actions, connect, kea, listeners, path, reducers, selectors } from 'kea' -import { UniversalFiltersGroup } from 'lib/components/UniversalFilters/UniversalFilters' import { FEATURE_FLAGS } from 'lib/constants' import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { DateRange } from '~/queries/schema' -import { FilterLogicalOperator } from '~/types' +import { FilterLogicalOperator, UniversalFiltersGroup } from '~/types' import type { errorTrackingLogicType } from './errorTrackingLogicType' diff --git a/frontend/src/scenes/error-tracking/queries.ts b/frontend/src/scenes/error-tracking/queries.ts index 0358e9096fd6d..6061773ecc562 100644 --- a/frontend/src/scenes/error-tracking/queries.ts +++ b/frontend/src/scenes/error-tracking/queries.ts @@ -1,4 +1,3 @@ -import { UniversalFiltersGroup } from 'lib/components/UniversalFilters/UniversalFilters' import { dayjs } from 'lib/dayjs' import { range } from 'lib/utils' @@ -11,7 +10,7 @@ import { InsightVizNode, NodeKind, } from '~/queries/schema' -import { AnyPropertyFilter, BaseMathType, ChartDisplayType, PropertyGroupFilter } from '~/types' +import { AnyPropertyFilter, 
BaseMathType, ChartDisplayType, PropertyGroupFilter, UniversalFiltersGroup } from '~/types' export type SparklineConfig = { value: number diff --git a/frontend/src/scenes/experiments/ExperimentForm.tsx b/frontend/src/scenes/experiments/ExperimentForm.tsx index 484281178822c..bcae52d655911 100644 --- a/frontend/src/scenes/experiments/ExperimentForm.tsx +++ b/frontend/src/scenes/experiments/ExperimentForm.tsx @@ -5,11 +5,12 @@ import { LemonDivider, LemonInput, LemonTextArea, Tooltip } from '@posthog/lemon import { BindLogic, useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' import { ExperimentVariantNumber } from 'lib/components/SeriesGlyph' -import { MAX_EXPERIMENT_VARIANTS } from 'lib/constants' +import { FEATURE_FLAGS, MAX_EXPERIMENT_VARIANTS } from 'lib/constants' import { IconChevronLeft } from 'lib/lemon-ui/icons' import { LemonButton } from 'lib/lemon-ui/LemonButton' import { LemonField } from 'lib/lemon-ui/LemonField' import { LemonRadio } from 'lib/lemon-ui/LemonRadio' +import { LemonSelect } from 'lib/lemon-ui/LemonSelect' import { capitalizeFirstLetter } from 'lib/utils' import { useEffect } from 'react' import { insightDataLogic } from 'scenes/insights/insightDataLogic' @@ -23,7 +24,7 @@ import { experimentLogic } from './experimentLogic' import { ExperimentInsightCreator } from './MetricSelector' const StepInfo = (): JSX.Element => { - const { experiment } = useValues(experimentLogic) + const { experiment, featureFlags } = useValues(experimentLogic) const { addExperimentGroup, removeExperimentGroup, moveToNextFormStep } = useActions(experimentLogic) return ( @@ -134,6 +135,14 @@ const StepInfo = (): JSX.Element => {
+ {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOLDOUTS] && ( +
+

Holdout group

+
Exclude a stable group of users from the experiment.
+ + +
+ )}
{ ) } +export const HoldoutSelector = (): JSX.Element => { + const { experiment, holdouts } = useValues(experimentLogic) + const { setExperiment } = useActions(experimentLogic) + + const holdoutOptions = holdouts.map((holdout) => ({ + value: holdout.id, + label: holdout.name, + })) + holdoutOptions.unshift({ value: null, label: 'No holdout' }) + + return ( +
+ { + setExperiment({ + ...experiment, + holdout_id: value, + }) + }} + data-attr="experiment-holdout-selector" + /> +
+ ) +} + export function ExperimentForm(): JSX.Element { const { currentFormStep, props } = useValues(experimentLogic) const { setCurrentFormStep } = useActions(experimentLogic) diff --git a/frontend/src/scenes/experiments/ExperimentPreview.tsx b/frontend/src/scenes/experiments/ExperimentPreview.tsx deleted file mode 100644 index 33b32b363c429..0000000000000 --- a/frontend/src/scenes/experiments/ExperimentPreview.tsx +++ /dev/null @@ -1,469 +0,0 @@ -import { IconInfo } from '@posthog/icons' -import { LemonButton, LemonDivider, LemonInput, LemonModal, Tooltip } from '@posthog/lemon-ui' -import { useActions, useValues } from 'kea' -import { Field, Form } from 'kea-forms' -import { InsightLabel } from 'lib/components/InsightLabel' -import { PropertyFilterButton } from 'lib/components/PropertyFilters/components/PropertyFilterButton' -import { TZLabel } from 'lib/components/TZLabel' -import { dayjs } from 'lib/dayjs' -import { LemonSlider } from 'lib/lemon-ui/LemonSlider' -import { humanFriendlyNumber } from 'lib/utils' -import { groupFilters } from 'scenes/feature-flags/FeatureFlags' -import { urls } from 'scenes/urls' - -import { - ActionFilter as ActionFilterType, - AnyPropertyFilter, - FilterType, - InsightType, - MultivariateFlagVariant, -} from '~/types' - -import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from './constants' -import { experimentLogic } from './experimentLogic' -import { ExperimentWorkflow } from './ExperimentWorkflow' -import { MetricSelector } from './MetricSelector' - -interface ExperimentPreviewProps { - experimentId: number | 'new' - trendCount: number - trendExposure?: number - funnelSampleSize?: number - funnelConversionRate: number - funnelEntrants?: number -} - -export function ExperimentPreview({ - experimentId, - trendCount, - funnelConversionRate, - trendExposure, - funnelSampleSize, - funnelEntrants, -}: ExperimentPreviewProps): JSX.Element { - const { - experimentInsightType, - editingExistingExperiment, - 
minimumDetectableEffect, - expectedRunningTime, - aggregationLabel, - experiment, - isExperimentGoalModalOpen, - isExperimentExposureModalOpen, - experimentLoading, - experimentMathAggregationForTrends, - } = useValues(experimentLogic({ experimentId })) - const { - setExperiment, - openExperimentGoalModal, - closeExperimentGoalModal, - updateExperimentGoal, - openExperimentExposureModal, - closeExperimentExposureModal, - updateExperimentExposure, - setNewExperimentInsight, - setExperimentExposureInsight, - } = useActions(experimentLogic({ experimentId })) - const sliderMaxValue = - experimentInsightType === InsightType.FUNNELS - ? 100 - funnelConversionRate < 50 - ? 100 - funnelConversionRate - : 50 - : 50 - - const currentDuration = dayjs().diff(dayjs(experiment?.start_date), 'hour') - - let runningTime = 0 - if (experiment?.start_date) { - runningTime = expectedRunningTime(funnelEntrants || 1, funnelSampleSize || 0, currentDuration) - } else { - runningTime = expectedRunningTime(funnelEntrants || 1, funnelSampleSize || 0) - } - - const expectedEndDate = dayjs(experiment?.start_date).add(runningTime, 'hour') - const showEndDate = !experiment?.end_date && currentDuration >= 24 && funnelEntrants && funnelSampleSize - - const targetingProperties = experiment.feature_flag?.filters - - return ( -
-
- {experimentId === 'new' && ( -
-
- Experiment preview -
-
- Here are the baseline metrics for your experiment. Adjust your minimum detectible threshold - to adjust for the smallest conversion value you'll accept, and the experiment duration.{' '} -
- -
- )} - {(experimentId === 'new' || editingExistingExperiment) && ( -
-
- Minimum detectable effect - - - -
-
- { - setExperiment({ - parameters: { - ...experiment.parameters, - minimum_detectable_effect: value, - }, - }) - }} - className="w-1/3" - /> - %} - value={experiment.parameters.minimum_detectable_effect || 5} - onChange={(value) => { - setExperiment({ - parameters: { - ...experiment.parameters, - minimum_detectable_effect: value ?? undefined, - }, - }) - }} - /> -
-
- )} -
- {experimentInsightType === InsightType.TRENDS ? ( -
- {!experiment?.start_date && ( - <> -
-
Baseline Count
-
{humanFriendlyNumber(trendCount || 0)}
-
-
-
Minimum Acceptable Count
-
- {humanFriendlyNumber( - trendCount + - Math.ceil(trendCount * ((minimumDetectableEffect || 5) / 100)) || 0 - )} -
-
- - )} -
-
Recommended running time
-
- ~{humanFriendlyNumber(trendExposure || 0)} days -
-
-
- ) : ( -
- {!experiment?.start_date && ( - <> -
-
Baseline Conversion Rate
-
{funnelConversionRate.toFixed(1)}%
-
-
-
Minimum Acceptable Conversion Rate
-
- {(funnelConversionRate + (minimumDetectableEffect || 5)).toFixed(1)}% -
-
- - )} -
-
Recommended Sample Size
-
- ~{humanFriendlyNumber(funnelSampleSize || 0)} persons -
-
- {!experiment?.start_date && ( -
-
Recommended running time
-
- ~{humanFriendlyNumber(runningTime || 0)} days -
-
- )} -
- )} -
-
-
Experiment variants
-
    - {experiment?.parameters?.feature_flag_variants?.map( - (variant: MultivariateFlagVariant, idx: number) => ( -
  • {variant.key}
  • - ) - )} -
-
-
-
Participants
-
- {targetingProperties ? ( - <> - {groupFilters(targetingProperties, undefined, aggregationLabel)} - - Check flag release conditions - - - ) : ( - '100% of all users' - )} -
-
-
-
- {experimentId !== 'new' && !editingExistingExperiment && ( -
-
Start date
- {experiment?.start_date ? ( - - ) : ( - Not started yet - )} -
- )} - {experimentInsightType === InsightType.FUNNELS && showEndDate ? ( -
-
Expected end date
- - {expectedEndDate.isAfter(dayjs()) - ? expectedEndDate.format('D MMM YYYY') - : dayjs().format('D MMM YYYY')} - -
- ) : null} - {/* The null prevents showing a 0 while loading */} - {experiment?.end_date && ( -
-
Completed date
- -
- )} -
-
- {experimentId !== 'new' && !editingExistingExperiment && ( -
-
- {experimentInsightType === InsightType.FUNNELS ? 'Conversion goal steps' : 'Trend goal'} -
- - {experiment?.start_date && ( - <> -
- - Change experiment goal - -
- {experimentInsightType === InsightType.TRENDS && - !experimentMathAggregationForTrends(experiment.filters) && ( - <> -
- Exposure metric - - - -
- {experiment.parameters?.custom_exposure_filter ? ( - - ) : ( - - Default via $feature_flag_called events - - )} -
- - - Change exposure metric - - {experiment.parameters?.custom_exposure_filter && ( - updateExperimentExposure(null)} - > - Reset exposure - - )} - -
- - )} - - )} -
- )} -
- - {experimentId !== 'new' && !editingExistingExperiment && !experiment?.start_date && ( -
- -
- )} - - - Cancel - - { - updateExperimentGoal(experiment.filters) - }} - type="primary" - loading={experimentLoading} - data-attr="create-annotation-submit" - > - Save - -
- } - > -
- - - -
- - - - Cancel - - { - if (experiment.parameters.custom_exposure_filter) { - updateExperimentExposure(experiment.parameters.custom_exposure_filter) - } - }} - type="primary" - loading={experimentLoading} - data-attr="create-annotation-submit" - > - Save - -
- } - > -
- - - -
- -
- ) -} - -export function MetricDisplay({ filters }: { filters?: FilterType }): JSX.Element { - const experimentInsightType = filters?.insight || InsightType.TRENDS - - return ( - <> - {([...(filters?.events || []), ...(filters?.actions || [])] as ActionFilterType[]) - .sort((a, b) => (a.order || 0) - (b.order || 0)) - .map((event: ActionFilterType, idx: number) => ( -
-
-
- {experimentInsightType === InsightType.FUNNELS ? (event.order || 0) + 1 : idx + 1} -
- - - -
-
- {event.properties?.map((prop: AnyPropertyFilter) => ( - - ))} -
-
- ))} - - ) -} diff --git a/frontend/src/scenes/experiments/ExperimentResult.tsx b/frontend/src/scenes/experiments/ExperimentResult.tsx deleted file mode 100644 index 4e29cfff81362..0000000000000 --- a/frontend/src/scenes/experiments/ExperimentResult.tsx +++ /dev/null @@ -1,266 +0,0 @@ -import './Experiment.scss' - -import { IconArchive } from '@posthog/icons' -import { LemonTable } from '@posthog/lemon-ui' -import { useValues } from 'kea' -import { EntityFilterInfo } from 'lib/components/EntityFilterInfo' -import { FunnelLayout } from 'lib/constants' -import { LemonProgress } from 'lib/lemon-ui/LemonProgress' - -import { filtersToQueryNode } from '~/queries/nodes/InsightQuery/utils/filtersToQueryNode' -import { queryFromFilters } from '~/queries/nodes/InsightViz/utils' -import { Query } from '~/queries/Query/Query' -import { NodeKind } from '~/queries/schema' -import { ChartDisplayType, FilterType, FunnelVizType, InsightShortId, InsightType } from '~/types' - -import { experimentLogic } from './experimentLogic' -import { LoadingState, NoResultsEmptyState } from './ExperimentView/components' -import { getExperimentInsightColour } from './utils' - -interface ExperimentResultProps { - secondaryMetricId?: number -} -export function ExperimentResult({ secondaryMetricId }: ExperimentResultProps): JSX.Element { - const { - experiment, - experimentResults, - secondaryMetricResults, - countDataForVariant, - exposureCountDataForVariant, - experimentResultsLoading, - secondaryMetricResultsLoading, - conversionRateForVariant, - getIndexForVariant, - sortedExperimentResultVariants, - experimentMathAggregationForTrends, - } = useValues(experimentLogic) - - const isSecondaryMetric = secondaryMetricId !== undefined - const targetResults = isSecondaryMetric ? 
secondaryMetricResults?.[secondaryMetricId] : experimentResults - const targetResultFilters = targetResults?.filters - const targetResultsInsightType = targetResultFilters?.insight || InsightType.TRENDS - const targetResultsLoading = isSecondaryMetric ? secondaryMetricResultsLoading : experimentResultsLoading - - const experimentResultVariants = experiment?.parameters?.feature_flag_variants || [] - - const validMetric = targetResults && targetResults.insight - - if (targetResultsLoading) { - return ( -
- -
- ) - } - - return ( -
- {validMetric && - (experimentResultVariants.length > 4 ? ( - <> - [ - variant, -
- {variant} -
, - ]) - ), - }, - { - header: - targetResultsInsightType === InsightType.TRENDS - ? experimentMathAggregationForTrends(targetResultFilters) - ? 'Metric' - : 'Count' - : 'Conversion Rate', - ...Object.fromEntries( - sortedExperimentResultVariants.map((variant) => [ - variant, - targetResultsInsightType === InsightType.TRENDS - ? countDataForVariant(targetResults, variant) - : `${conversionRateForVariant(targetResults, variant)}%`, - ]) - ), - }, - targetResultsInsightType === InsightType.TRENDS - ? { - header: 'Exposure', - ...Object.fromEntries( - sortedExperimentResultVariants.map((variant) => [ - variant, - exposureCountDataForVariant(targetResults, variant), - ]) - ), - } - : {}, - { - header: 'Probability to be the best', - ...Object.fromEntries( - sortedExperimentResultVariants.map((variant) => [ - variant, - targetResults.probability?.[variant] != undefined - ? `${(targetResults.probability[variant] * 100).toFixed(1)}%` - : '--', - ]) - ), - }, - ].filter((row) => Object.keys(row).length > 0) as { - [key: string]: string | JSX.Element - }[] - } - columns={[ - { title: 'Header', dataIndex: 'header' }, - ...sortedExperimentResultVariants.map((variant) => ({ - title: variant, - dataIndex: variant, - })), - ]} - /> - - ) : ( -
- { - //sort by decreasing probability, but omit the ones that are not in the results - sortedExperimentResultVariants - .filter( - (variant) => isSecondaryMetric || targetResults.probability?.hasOwnProperty(variant) - ) - .map((variant, idx) => ( -
-
- {variant} -
- {targetResultsInsightType === InsightType.TRENDS ? ( - <> -
- -
- {targetResults.insight?.[0] && - 'action' in targetResults.insight[0] && ( - - )} - - {experimentMathAggregationForTrends(targetResultFilters) - ? 'metric' - : 'count'} - : - -
-
{' '} - {countDataForVariant(targetResults, variant)} -
-
- Exposure:{' '} - {exposureCountDataForVariant(targetResults, variant)} -
- - ) : ( -
- - Conversion rate:{' '} - - {conversionRateForVariant(targetResults, variant)}% -
- )} - -
- Probability that this variant is the best:{' '} - - {targetResults.probability?.[variant] != undefined - ? (targetResults.probability?.[variant] * 100).toFixed(1) - : '--'} - % - -
-
- )) - } -
- ))} - {validMetric ? ( - // :KLUDGE: using `insights-page` for proper styling, should rather adapt styles -
- -
- ) : ( - experiment.start_date && - !targetResultsLoading && ( - <> - {isSecondaryMetric ? ( -
-
- -

- There are no results for this metric yet -

-
-
- ) : ( - - )} - - ) - )} -
- ) -} - -const transformResultFilters = (filters: Partial): Partial => ({ - ...filters, - ...(filters.insight === InsightType.FUNNELS && { - layout: FunnelLayout.vertical, - funnel_viz_type: FunnelVizType.Steps, - }), - ...(filters.insight === InsightType.TRENDS && { - display: ChartDisplayType.ActionsLineGraphCumulative, - }), -}) diff --git a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx index 0430a50a0b41b..1c8bf5bd5e71a 100644 --- a/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/ExperimentView.tsx @@ -2,6 +2,8 @@ import '../Experiment.scss' import { LemonDivider } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' +import { FEATURE_FLAGS } from 'lib/constants' +import { featureFlagLogic } from 'lib/logic/featureFlagLogic' import { ExperimentImplementationDetails } from '../ExperimentImplementationDetails' import { experimentLogic } from '../experimentLogic' @@ -15,6 +17,7 @@ import { import { DataCollection } from './DataCollection' import { DistributionTable } from './DistributionTable' import { ExperimentExposureModal, ExperimentGoalModal, Goal } from './Goal' +import { HoldoutSelector } from './HoldoutSelector' import { Info } from './Info' import { Overview } from './Overview' import { ReleaseConditionsTable } from './ReleaseConditionsTable' @@ -24,6 +27,7 @@ import { SecondaryMetricsTable } from './SecondaryMetricsTable' export function ExperimentView(): JSX.Element { const { experiment, experimentLoading, experimentResultsLoading, experimentId, experimentResults } = useValues(experimentLogic) + const { featureFlags } = useValues(featureFlagLogic) const { updateExperimentSecondaryMetrics } = useActions(experimentLogic) @@ -47,6 +51,7 @@ export function ExperimentView(): JSX.Element {
+ {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOLDOUTS] && }
@@ -60,6 +65,7 @@ export function ExperimentView(): JSX.Element {
+ {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOLDOUTS] && }
diff --git a/frontend/src/scenes/experiments/ExperimentView/HoldoutSelector.tsx b/frontend/src/scenes/experiments/ExperimentView/HoldoutSelector.tsx new file mode 100644 index 0000000000000..f8982ad3d3636 --- /dev/null +++ b/frontend/src/scenes/experiments/ExperimentView/HoldoutSelector.tsx @@ -0,0 +1,47 @@ +import { IconInfo } from '@posthog/icons' +import { LemonSelect, Tooltip } from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' + +import { experimentLogic } from '../experimentLogic' + +export function HoldoutSelector(): JSX.Element { + const { experiment, holdouts, isExperimentRunning } = useValues(experimentLogic) + const { setExperiment, updateExperiment } = useActions(experimentLogic) + + const holdoutOptions = holdouts.map((holdout) => ({ + value: holdout.id, + label: holdout.name, + })) + holdoutOptions.unshift({ value: null, label: 'No holdout' }) + + return ( +
+
+

Holdout group

+ + + +
+
+ { + setExperiment({ + ...experiment, + holdout_id: value, + }) + updateExperiment({ holdout_id: value }) + }} + data-attr="experiment-holdout-selector" + /> +
+
+ ) +} diff --git a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx index bcfeeb81aef25..e046d0f3a52fe 100644 --- a/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/SummaryTable.tsx @@ -289,7 +289,7 @@ export function SummaryTable(): JSX.Element { return ( <> {percentage ? ( - + {percentage.toFixed(2)}% diff --git a/frontend/src/scenes/experiments/ExperimentView/components.tsx b/frontend/src/scenes/experiments/ExperimentView/components.tsx index 71e6c7f35dd25..e61c32505c857 100644 --- a/frontend/src/scenes/experiments/ExperimentView/components.tsx +++ b/frontend/src/scenes/experiments/ExperimentView/components.tsx @@ -50,10 +50,25 @@ export function VariantTag({ experimentId: number | 'new' variantKey: string }): JSX.Element { - const { experimentResults, getIndexForVariant } = useValues(experimentLogic({ experimentId })) + const { experiment, experimentResults, getIndexForVariant } = useValues(experimentLogic({ experimentId })) + + if (experiment.holdout && variantKey === `holdout-${experiment.holdout_id}`) { + return ( + +
+ {experiment.holdout.name} + + ) + } return ( - +
+ } + return ( - {tab === ExperimentsTabs.Archived ? ( - + + {featureFlags[FEATURE_FLAGS.EXPERIMENTS_HOLDOUTS] && tab === ExperimentsTabs.Holdouts ? ( + ) : ( - router.actions.push(urls.experiment('new'))} - isEmpty={shouldShowEmptyState} - customHog={ExperimentsHog} - /> - )} - {!shouldShowEmptyState && ( <> -
- + ) : ( + router.actions.push(urls.experiment('new'))} + isEmpty={shouldShowEmptyState} + customHog={ExperimentsHog} /> -
- - Status - - { - if (status) { - setSearchStatus(status as ProgressStatus | 'all') - } + )} + {!shouldShowEmptyState && ( + <> +
+ +
+ + Status + + { + if (status) { + setSearchStatus(status as ProgressStatus | 'all') + } + }} + options={ + [ + { label: 'All', value: 'all' }, + { label: 'Draft', value: ProgressStatus.Draft }, + { label: 'Running', value: ProgressStatus.Running }, + { label: 'Complete', value: ProgressStatus.Complete }, + ] as { label: string; value: string }[] + } + value={searchStatus ?? 'all'} + dropdownMatchSelectWidth={false} + dropdownMaxContentWidth + /> + + Created by + + setUserFilter(user?.uuid ?? null)} + /> +
+
+ - - Created by - - setUserFilter(user?.uuid ?? null)} - /> -
-
- + + )} )}
diff --git a/frontend/src/scenes/experiments/Holdouts.tsx b/frontend/src/scenes/experiments/Holdouts.tsx new file mode 100644 index 0000000000000..e6d7d6a13f328 --- /dev/null +++ b/frontend/src/scenes/experiments/Holdouts.tsx @@ -0,0 +1,229 @@ +import { IconPencil, IconTrash } from '@posthog/icons' +import { + LemonBanner, + LemonButton, + LemonDialog, + LemonDivider, + LemonInput, + LemonLabel, + LemonModal, + LemonTable, + LemonTableColumns, +} from '@posthog/lemon-ui' +import { useActions, useValues } from 'kea' +import { LemonSlider } from 'lib/lemon-ui/LemonSlider' +import { useState } from 'react' + +import { Holdout, holdoutsLogic, NEW_HOLDOUT } from './holdoutsLogic' + +export function Holdouts(): JSX.Element { + const { holdouts, holdoutsLoading, holdout } = useValues(holdoutsLogic) + const { createHoldout, deleteHoldout, setHoldout, updateHoldout } = useActions(holdoutsLogic) + + const [isHoldoutModalOpen, setIsHoldoutModalOpen] = useState(false) + const [editingHoldout, setEditingHoldout] = useState(null) + + const openEditModal = (holdout: Holdout): void => { + setEditingHoldout(holdout) + setHoldout(holdout) + setIsHoldoutModalOpen(true) + } + + const openCreateModal = (): void => { + setEditingHoldout(null) + setHoldout({ ...NEW_HOLDOUT }) + setIsHoldoutModalOpen(true) + } + + const closeModal = (): void => { + setIsHoldoutModalOpen(false) + setEditingHoldout(null) + } + + const getDisabledReason = (): string | undefined => { + if (!holdout.name) { + return 'Name is required' + } + if (holdout.filters?.[0]?.rollout_percentage === undefined) { + return 'Rollout percentage is required' + } + } + + const columns = [ + { + title: 'Name', + dataIndex: 'name', + key: 'name', + render: (name: string) =>
{name}
, + }, + { + title: 'Description', + dataIndex: 'description', + key: 'description', + }, + { + title: 'Rollout Percentage', + dataIndex: 'filters', + key: 'rollout', + render: (filters: Holdout['filters']) => { + const percentage = filters?.[0]?.rollout_percentage || 0 + return
{percentage} %
+ }, + }, + { + title: 'Actions', + key: 'actions', + render: (_: any, record: Holdout) => ( +
+ } + onClick={() => openEditModal(record)} + /> + } + size="xsmall" + status="danger" + onClick={() => { + LemonDialog.open({ + title: 'Delete this holdout?', + content: ( +
+ Are you sure you want to delete the holdout "{record.name}"? This action + cannot be undone. +
+ ), + primaryButton: { + children: 'Delete', + type: 'primary', + status: 'danger', + onClick: () => deleteHoldout(record.id), + size: 'small', + }, + secondaryButton: { + children: 'Cancel', + type: 'tertiary', + size: 'small', + }, + }) + }} + /> +
+ ), + }, + ] + + return ( +
+ + Cancel + { + if (editingHoldout) { + updateHoldout(editingHoldout.id, holdout) + } else { + createHoldout() + } + closeModal() + }} + disabledReason={getDisabledReason()} + > + {editingHoldout ? 'Update' : 'Save'} + + + } + > +
+
+ Name + setHoldout({ name })} + placeholder="e.g. 'Frontend holdout group 1'" + /> +
+
+ Description + setHoldout({ description })} + /> +
+
+ + +
+
+ Specify the percentage population that should be included in this holdout group. + This is stable across experiments. +
+
+
+
+ Roll out to{' '} + + setHoldout({ + filters: [{ properties: [], rollout_percentage }], + }) + } + min={0} + max={100} + step={1} + className="ml-1.5 w-20" + /> + + setHoldout({ + filters: [{ properties: [], rollout_percentage }], + }) + } + min={0} + max={100} + step="any" + suffix={%} + /> + of total users. +
+
+
+
+ + +
+
+ Holdouts are stable groups of users excluded from experiment variations.They act as a baseline, + helping you see how users behave without any changes applied. This lets you directly compare + their behavior to those exposed to the experiment variations. Once a holdout is configured, you + can apply it to an experiment during creation. +
+
+
+ + You have not created any holdouts yet.
+ } + loading={holdoutsLoading} + dataSource={holdouts} + columns={columns as LemonTableColumns} + /> + + New holdout + +
+ ) +} diff --git a/frontend/src/scenes/experiments/MetricSelector.tsx b/frontend/src/scenes/experiments/MetricSelector.tsx index 975ea070d11a4..4f4e7b6e1e262 100644 --- a/frontend/src/scenes/experiments/MetricSelector.tsx +++ b/frontend/src/scenes/experiments/MetricSelector.tsx @@ -132,6 +132,7 @@ export function ExperimentInsightCreator({ insightProps }: { insightProps: Insig seriesIndicatorType={isTrends ? undefined : 'numeric'} sortable={isTrends ? undefined : true} showNestedArrow={isTrends ? undefined : true} + showNumericalPropsOnly={isTrends} propertiesTaxonomicGroupTypes={[ TaxonomicFilterGroupType.EventProperties, TaxonomicFilterGroupType.PersonProperties, @@ -139,6 +140,8 @@ export function ExperimentInsightCreator({ insightProps }: { insightProps: Insig TaxonomicFilterGroupType.Cohorts, TaxonomicFilterGroupType.Elements, TaxonomicFilterGroupType.HogQLExpression, + TaxonomicFilterGroupType.DataWarehouseProperties, + TaxonomicFilterGroupType.DataWarehousePersonProperties, ]} />
diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 09b6597176fc8..19e33aca83831 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -54,6 +54,7 @@ import { import { EXPERIMENT_EXPOSURE_INSIGHT_ID, EXPERIMENT_INSIGHT_ID } from './constants' import type { experimentLogicType } from './experimentLogicType' import { experimentsLogic } from './experimentsLogic' +import { holdoutsLogic } from './holdoutsLogic' import { getMinimumDetectableEffect, transformFiltersForWinningVariant } from './utils' const NEW_EXPERIMENT: Experiment = { @@ -71,6 +72,7 @@ const NEW_EXPERIMENT: Experiment = { created_at: null, created_by: null, updated_at: null, + holdout_id: null, } export interface ExperimentLogicProps { @@ -112,6 +114,8 @@ export const experimentLogic = kea([ ['insightDataLoading as goalInsightDataLoading'], featureFlagLogic, ['featureFlags'], + holdoutsLogic, + ['holdouts'], ], actions: [ experimentsLogic, diff --git a/frontend/src/scenes/experiments/holdoutsLogic.tsx b/frontend/src/scenes/experiments/holdoutsLogic.tsx new file mode 100644 index 0000000000000..3f70a30d61216 --- /dev/null +++ b/frontend/src/scenes/experiments/holdoutsLogic.tsx @@ -0,0 +1,95 @@ +import { actions, events, kea, listeners, path, reducers } from 'kea' +import { loaders } from 'kea-loaders' +import api from 'lib/api' +import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast' + +import { UserBasicType } from '~/types' + +import type { holdoutsLogicType } from './holdoutsLogicType' + +export interface Holdout { + id: number | null + name: string + description: string | null + filters: Record + created_by: UserBasicType | null + created_at: string | null + updated_at: string | null +} + +export const NEW_HOLDOUT: Holdout = { + id: null, + name: '', + description: null, + filters: [ + { + properties: [], + rollout_percentage: 10, + variant: 
'holdout', + }, + ], + created_by: null, + created_at: null, + updated_at: null, +} + +export const holdoutsLogic = kea([ + path(['scenes', 'experiments', 'holdoutsLogic']), + actions({ + setHoldout: (holdout: Partial) => ({ holdout }), + createHoldout: true, + updateHoldout: (id: number | null, holdout: Partial) => ({ id, holdout }), + deleteHoldout: (id: number | null) => ({ id }), + loadHoldout: (id: number | null) => ({ id }), + }), + reducers({ + holdout: [ + NEW_HOLDOUT, + { + setHoldout: (state, { holdout }) => ({ ...state, ...holdout }), + }, + ], + }), + loaders(({ values }) => ({ + holdouts: [ + [] as Holdout[], + { + loadHoldouts: async () => { + const response = await api.get(`api/projects/@current/experiment_holdouts/`) + return response.results as Holdout[] + }, + createHoldout: async () => { + const response = await api.create(`api/projects/@current/experiment_holdouts/`, values.holdout) + return [...values.holdouts, response] as Holdout[] + }, + updateHoldout: async ({ id, holdout }) => { + const response = await api.update(`api/projects/@current/experiment_holdouts/${id}/`, holdout) + return values.holdouts.map((h) => (h.id === id ? 
response : h)) as Holdout[] + }, + deleteHoldout: async ({ id }) => { + await api.delete(`api/projects/@current/experiment_holdouts/${id}/`) + return values.holdouts.filter((h) => h.id !== id) + }, + }, + ], + })), + listeners(({ actions }) => ({ + createHoldoutSuccess: () => { + lemonToast.success('Holdout created') + actions.loadHoldouts() + }, + updateHoldoutSuccess: () => { + lemonToast.success('Holdout updated') + actions.loadHoldouts() + }, + deleteHoldoutSuccess: () => { + lemonToast.success('Holdout deleted') + actions.loadHoldouts() + }, + })), + events(({ actions }) => ({ + afterMount: () => { + actions.loadHoldouts() + }, + })), +]) diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx index 21bd124d956c9..c440c80286283 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx @@ -1,6 +1,6 @@ import './FeatureFlag.scss' -import { IconCollapse, IconExpand, IconPlus, IconTrash } from '@posthog/icons' +import { IconBalance, IconCollapse, IconExpand, IconPlus, IconTrash } from '@posthog/icons' import { LemonDialog, LemonSegmentedButton, LemonSkeleton, LemonSwitch } from '@posthog/lemon-ui' import { useActions, useValues } from 'kea' import { Form, Group } from 'kea-forms' @@ -950,9 +950,14 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element {
-
+
Rollout - (Redistribute) + + +
{variants.map((variant, index) => ( @@ -1023,6 +1028,7 @@ function FeatureFlagRollout({ readOnly }: { readOnly?: boolean }): JSX.Element { } } }} + suffix={%} /> {filterGroups.filter((group) => group.variant === variant.key) .length > 0 && ( diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx index ccd0986e0a2af..d7d410225e514 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditions.tsx @@ -40,6 +40,7 @@ export function FeatureFlagReleaseConditions({ filters, onChange, hideMatchOptions, + nonEmptyFeatureFlagVariants, }: FeatureFlagReleaseConditionsLogicProps & { hideMatchOptions?: boolean isSuper?: boolean @@ -78,6 +79,8 @@ export function FeatureFlagReleaseConditions({ const { cohortsById } = useValues(cohortsModel) const { groupsAccessStatus } = useValues(groupsAccessLogic) + const featureFlagVariants = nonEmptyFeatureFlagVariants || nonEmptyVariants + const filterGroups: FeatureFlagGroupType[] = (isSuper ? filters?.super_groups : filters?.groups) || [] // :KLUDGE: Match by select only allows Select.Option as children, so render groups option directly rather than as a child const matchByGroupsIntroductionOption = GroupsIntroductionOption() @@ -335,7 +338,7 @@ export function FeatureFlagReleaseConditions({
)} - {nonEmptyVariants.length > 0 && ( + {featureFlagVariants.length > 0 && ( <> {readOnly ? ( @@ -360,7 +363,7 @@ export function FeatureFlagReleaseConditions({ allowClear={true} value={group.variant} onChange={(value) => updateConditionSet(index, undefined, undefined, value)} - options={nonEmptyVariants.map((variant) => ({ + options={featureFlagVariants.map((variant) => ({ label: variant.key, value: variant.key, }))} diff --git a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts index 3c04cb826266c..88ae79a8fa1f8 100644 --- a/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts +++ b/frontend/src/scenes/feature-flags/FeatureFlagReleaseConditionsLogic.ts @@ -11,6 +11,7 @@ import { FeatureFlagFilters, FeatureFlagGroupType, GroupTypeIndex, + MultivariateFlagVariant, PropertyFilterType, UserBlastRadiusType, } from '~/types' @@ -24,6 +25,7 @@ export interface FeatureFlagReleaseConditionsLogicProps { id?: string readOnly?: boolean onChange?: (filters: FeatureFlagFilters, errors: any) => void + nonEmptyFeatureFlagVariants?: MultivariateFlagVariant[] } export const featureFlagReleaseConditionsLogic = kea([ diff --git a/frontend/src/scenes/funnels/FunnelLineGraph.tsx b/frontend/src/scenes/funnels/FunnelLineGraph.tsx index 544b833326909..871a1c1e07520 100644 --- a/frontend/src/scenes/funnels/FunnelLineGraph.tsx +++ b/frontend/src/scenes/funnels/FunnelLineGraph.tsx @@ -87,6 +87,7 @@ export function FunnelLineGraph({ kind: NodeKind.FunnelsActorsQuery, source: querySource, funnelTrendsDropOff: false, + includeRecordings: true, funnelTrendsEntrancePeriodStart: dayjs(day).format('YYYY-MM-DD HH:mm:ss'), } openPersonsModal({ diff --git a/frontend/src/scenes/heatmaps/HeatmapsBrowser.tsx b/frontend/src/scenes/heatmaps/HeatmapsBrowser.tsx index 99c561140e765..c73214f15571e 100644 --- a/frontend/src/scenes/heatmaps/HeatmapsBrowser.tsx +++ 
b/frontend/src/scenes/heatmaps/HeatmapsBrowser.tsx @@ -1,5 +1,6 @@ import { IconCollapse, IconGear } from '@posthog/icons' import { LemonBanner, LemonButton, LemonInputSelect, LemonSkeleton, Spinner, Tooltip } from '@posthog/lemon-ui' +import clsx from 'clsx' import { BindLogic, useActions, useValues } from 'kea' import { AuthorizedUrlList } from 'lib/components/AuthorizedUrlList/AuthorizedUrlList' import { appEditorUrl, AuthorizedUrlListType } from 'lib/components/AuthorizedUrlList/authorizedUrlListLogic' @@ -152,7 +153,7 @@ function FilterPanel(): JSX.Element { } = useActions(logic) return ( -
+
{filterPanelCollapsed ? (
- ) : null + ) } function EmbeddedHeatmapBrowser({ @@ -226,7 +225,7 @@ function EmbeddedHeatmapBrowser({ }): JSX.Element | null { const logic = heatmapsBrowserLogic() - const { browserUrl } = useValues(logic) + const { browserUrl, loading, iframeBanner } = useValues(logic) const { onIframeLoad, setIframeWidth } = useActions(logic) const { width: iframeWidth } = useResizeObserver({ ref: iframeRef }) @@ -238,8 +237,8 @@ function EmbeddedHeatmapBrowser({
- - + {loading ? : null} + {!loading && iframeBanner ? : null}