diff --git a/.github/workflows/build-hogql-parser.yml b/.github/workflows/build-hogql-parser.yml
index 90395eaa52180..fad81d60e1495 100644
--- a/.github/workflows/build-hogql-parser.yml
+++ b/.github/workflows/build-hogql-parser.yml
@@ -52,7 +52,7 @@ jobs:
curl -s -u posthog-bot:${{ secrets.POSTHOG_BOT_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} -X POST -d "{ \"body\": \"$message_body\" }" "https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments"
fi
fi
- echo "::set-output name=parser-release-needed::$parser_release_needed"
+ echo "parser-release-needed=$parser_release_needed" >> $GITHUB_OUTPUT
build-wheels:
name: Build wheels on ${{ matrix.os }}
diff --git a/.github/workflows/container-images-cd.yml b/.github/workflows/container-images-cd.yml
index 286dc81133570..9f9c05dfcbbda 100644
--- a/.github/workflows/container-images-cd.yml
+++ b/.github/workflows/container-images-cd.yml
@@ -98,7 +98,7 @@ jobs:
- name: Check for changes in plugins directory
id: check_changes_plugins
run: |
- echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep '^plugin-server/' || true)"
+ echo "changed=$(git diff --name-only HEAD^ HEAD | grep '^plugin-server/' || true)" >> $GITHUB_OUTPUT
- name: Trigger Ingestion Cloud deployment
if: steps.check_changes_plugins.outputs.changed != ''
@@ -116,7 +116,7 @@ jobs:
- name: Check for changes that affect batch exports temporal worker
id: check_changes_batch_exports_temporal_worker
run: |
- echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$' || true)"
+ echo "changed=$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/batch_exports|^posthog/batch_exports/|^posthog/management/commands/start_temporal_worker.py$' || true)" >> $GITHUB_OUTPUT
- name: Trigger Batch Exports Temporal Worker Cloud deployment
if: steps.check_changes_batch_exports_temporal_worker.outputs.changed != ''
@@ -135,7 +135,7 @@ jobs:
- name: Check for changes that affect data warehouse temporal worker
id: check_changes_data_warehouse_temporal_worker
run: |
- echo "::set-output name=changed::$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$' || true)"
+ echo "changed=$(git diff --name-only HEAD^ HEAD | grep -E '^posthog/temporal/common|^posthog/temporal/data_imports|^posthog/warehouse/|^posthog/management/commands/start_temporal_worker.py$' || true)" >> $GITHUB_OUTPUT
- name: Trigger Data Warehouse Temporal Worker Cloud deployment
if: steps.check_changes_data_warehouse_temporal_worker.outputs.changed != ''
diff --git a/bin/build-schema.mjs b/bin/build-schema.mjs
index 3b38e2dc5ef72..de14126b9eb34 100644
--- a/bin/build-schema.mjs
+++ b/bin/build-schema.mjs
@@ -25,4 +25,4 @@ fs.writeFile(output_path, schemaString, (err) => {
if (err) {
throw err;
}
-});
\ No newline at end of file
+});
diff --git a/ee/api/ee_event_definition.py b/ee/api/ee_event_definition.py
index e83b293b8caaa..325a845aaa804 100644
--- a/ee/api/ee_event_definition.py
+++ b/ee/api/ee_event_definition.py
@@ -10,6 +10,8 @@
Detail,
)
+from loginas.utils import is_impersonated_session
+
class EnterpriseEventDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer):
updated_by = UserBasicSerializer(read_only=True)
@@ -98,6 +100,7 @@ def update(self, event_definition: EnterpriseEventDefinition, validated_data):
item_id=str(event_definition.id),
scope="EventDefinition",
activity="changed",
+ was_impersonated=is_impersonated_session(self.context["request"]),
detail=Detail(name=str(event_definition.name), changes=changes),
)
diff --git a/ee/api/ee_property_definition.py b/ee/api/ee_property_definition.py
index aa190bbd7c72d..308e7461942ca 100644
--- a/ee/api/ee_property_definition.py
+++ b/ee/api/ee_property_definition.py
@@ -9,6 +9,7 @@
log_activity,
Detail,
)
+from loginas.utils import is_impersonated_session
class EnterprisePropertyDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer):
@@ -77,6 +78,7 @@ def update(self, property_definition: EnterprisePropertyDefinition, validated_da
organization_id=None,
team_id=self.context["team_id"],
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
item_id=str(property_definition.id),
scope="PropertyDefinition",
activity="changed",
diff --git a/ee/api/integration.py b/ee/api/integration.py
index d7da62a31a2ee..8386e4271a126 100644
--- a/ee/api/integration.py
+++ b/ee/api/integration.py
@@ -19,8 +19,8 @@ class PublicIntegrationViewSet(viewsets.GenericViewSet):
queryset = Integration.objects.all()
serializer_class = IntegrationSerializer
- authentication_classes = [] # type: ignore
- permission_classes = [] # type: ignore
+ authentication_classes = []
+ permission_classes = []
@action(methods=["POST"], detail=False, url_path="slack/events")
def slack_events(self, request: Request, *args: Any, **kwargs: Any) -> Response:
diff --git a/ee/api/test/base.py b/ee/api/test/base.py
index 1fb46cceae1cd..55e7930bfadf1 100644
--- a/ee/api/test/base.py
+++ b/ee/api/test/base.py
@@ -15,7 +15,7 @@ class LicensedTestMixin:
CONFIG_LICENSE_KEY: Optional[str] = "12345::67890"
CONFIG_LICENSE_PLAN: Optional[str] = "enterprise"
- license: License = None # type: ignore
+ license: License = None
def license_required_response(
self,
@@ -30,17 +30,17 @@ def license_required_response(
@classmethod
def setUpTestData(cls):
- super().setUpTestData() # type: ignore
+ super().setUpTestData()
if cls.CONFIG_LICENSE_PLAN:
cls.license = super(LicenseManager, cast(LicenseManager, License.objects)).create(
key=cls.CONFIG_LICENSE_KEY,
plan=cls.CONFIG_LICENSE_PLAN,
valid_until=datetime.datetime(2038, 1, 19, 3, 14, 7, tzinfo=ZoneInfo("UTC")),
)
- if hasattr(cls, "organization") and cls.organization: # type: ignore
- cls.organization.available_product_features = AVAILABLE_PRODUCT_FEATURES # type: ignore
- cls.organization.update_available_features() # type: ignore
- cls.organization.save() # type: ignore
+ if hasattr(cls, "organization") and cls.organization:
+ cls.organization.available_product_features = AVAILABLE_PRODUCT_FEATURES
+ cls.organization.update_available_features()
+ cls.organization.save()
class APILicensedTest(LicensedTestMixin, APIBaseTest):
diff --git a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel.ambr b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel.ambr
index 711f1ac076004..a0bbfdd0f2f05 100644
--- a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel.ambr
+++ b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel.ambr
@@ -551,7 +551,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.4
@@ -686,7 +687,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.6
@@ -821,7 +823,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelGroupBreakdown.test_funnel_breakdown_group.8
@@ -956,7 +959,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestStrictFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
@@ -1489,7 +1493,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.4
@@ -1602,7 +1607,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.6
@@ -1715,7 +1721,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestStrictFunnelGroupBreakdown.test_funnel_breakdown_group.8
@@ -1828,7 +1835,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestUnorderedFunnelGroupBreakdown.test_funnel_aggregate_by_groups_breakdown_group_person_on_events
@@ -2526,7 +2534,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.14
@@ -2804,7 +2813,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('technology'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.2
@@ -3082,7 +3092,8 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestUnorderedFunnelGroupBreakdown.test_funnel_breakdown_group.6
@@ -3360,6 +3371,7 @@
AND arrayFlatten(array(prop)) = arrayFlatten(array('finance'))
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
diff --git a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlation.ambr b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlation.ambr
index 97d0b6197b151..54a7a726449d2 100644
--- a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlation.ambr
+++ b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlation.ambr
@@ -62,7 +62,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -155,7 +156,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -255,7 +257,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -339,7 +342,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -423,7 +427,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -507,7 +512,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -579,7 +585,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -679,7 +686,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -763,7 +771,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -847,7 +856,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -931,7 +941,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -995,7 +1006,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1080,7 +1092,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1165,7 +1178,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1243,7 +1257,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1318,7 +1333,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1393,7 +1409,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1468,7 +1485,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1551,7 +1569,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1637,7 +1656,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1720,7 +1740,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1803,7 +1824,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1889,7 +1911,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -1972,7 +1995,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2055,7 +2079,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2138,7 +2163,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2229,7 +2255,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2323,7 +2350,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2414,7 +2442,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2020-01-14 23:59:59', 'UTC') AS date_to,
toDateTime('2020-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -2489,7 +2518,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -2577,7 +2607,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -2649,7 +2680,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -2721,7 +2753,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -2793,7 +2826,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -2857,7 +2891,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -2937,7 +2972,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -3025,7 +3061,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3097,7 +3134,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3169,7 +3207,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3241,7 +3280,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3305,7 +3345,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -3387,7 +3428,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -3477,7 +3519,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3551,7 +3594,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3625,7 +3669,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3699,7 +3744,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -3765,7 +3811,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -3847,7 +3894,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -3937,7 +3985,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4011,7 +4060,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4085,7 +4135,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4159,7 +4210,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4225,7 +4277,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -4313,7 +4366,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
@@ -4409,7 +4463,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4489,7 +4544,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4569,7 +4625,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4649,7 +4706,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id
FROM funnel_actors
@@ -4721,7 +4779,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT concat(prop.1, '::', prop.2) as name,
countDistinctIf(actor_id, steps = target_step) AS success_count,
diff --git a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr
index ea36a9bf3d392..c861f37a3a7c5 100644
--- a/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr
+++ b/ee/clickhouse/queries/funnels/test/__snapshots__/test_funnel_correlations_persons.ambr
@@ -98,7 +98,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2021-01-08 23:59:59', 'UTC') AS date_to,
toDateTime('2021-01-01 00:00:00', 'UTC') AS date_from,
2 AS target_step,
@@ -306,7 +307,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2, 3]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
toDateTime('2021-01-08 23:59:59', 'UTC') AS date_to,
toDateTime('2021-01-01 00:00:00', 'UTC') AS date_from,
3 AS target_step,
@@ -458,7 +460,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id ,
any(funnel_actors.matching_events) AS matching_events
@@ -590,7 +593,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id ,
any(funnel_actors.matching_events) AS matching_events
@@ -722,7 +726,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [1, 2]
- ORDER BY aggregation_target),
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000),
2 AS target_step
SELECT funnel_actors.actor_id AS actor_id ,
any(funnel_actors.matching_events) AS matching_events
diff --git a/ee/clickhouse/queries/test/__snapshots__/test_paths.ambr b/ee/clickhouse/queries/test/__snapshots__/test_paths.ambr
index 098df6f8c0933..21af96dfff353 100644
--- a/ee/clickhouse/queries/test/__snapshots__/test_paths.ambr
+++ b/ee/clickhouse/queries/test/__snapshots__/test_paths.ambr
@@ -87,7 +87,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -254,7 +255,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -416,7 +418,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -578,7 +581,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -740,7 +744,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -902,7 +907,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -1077,7 +1083,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -1247,7 +1254,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -1417,7 +1425,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -1587,7 +1596,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 1
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -1757,7 +1767,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -1924,7 +1935,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -2091,7 +2103,8 @@
steps
HAVING steps = max_steps)
WHERE steps = 2
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -2258,7 +2271,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -2425,7 +2439,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT last_path_key as source_event,
path_key as target_event,
COUNT(*) AS event_count,
@@ -2594,7 +2609,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -2758,7 +2774,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -2922,7 +2939,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -3086,7 +3104,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
@@ -3250,7 +3269,8 @@
steps
HAVING steps = max_steps)
WHERE steps IN [2, 3]
- ORDER BY aggregation_target)
+ ORDER BY aggregation_target SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000)
SELECT DISTINCT person_id AS actor_id
FROM
(SELECT person_id,
diff --git a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel.ambr b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel.ambr
index d608a84d783d0..95dde4841c728 100644
--- a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel.ambr
+++ b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel.ambr
@@ -98,7 +98,8 @@
WHERE steps IN [1, 2]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: ClickhouseTestFunnelGroups.test_funnel_group_aggregation_with_groups_entity_filtering
@@ -202,7 +203,8 @@
WHERE steps IN [1, 2]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: ClickhouseTestFunnelGroups.test_funnel_with_groups_entity_filtering
@@ -319,7 +321,8 @@
WHERE steps IN [1, 2]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: ClickhouseTestFunnelGroups.test_funnel_with_groups_global_filtering
@@ -450,6 +453,7 @@
WHERE steps IN [1, 2]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
diff --git a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_person.ambr b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_person.ambr
index af0e795b1c15b..5b85988ab77ad 100644
--- a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_person.ambr
+++ b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_person.ambr
@@ -104,6 +104,7 @@
WHERE steps IN [1, 2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
diff --git a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_unordered.ambr b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_unordered.ambr
index 91d83f0593b2c..9b03221c7ce41 100644
--- a/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_unordered.ambr
+++ b/ee/clickhouse/views/test/funnel/__snapshots__/test_clickhouse_funnel_unordered.ambr
@@ -157,6 +157,7 @@
WHERE steps IN [1, 2]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py
index 31e76a168f43c..5849481d2f4c2 100644
--- a/ee/session_recordings/session_recording_playlist.py
+++ b/ee/session_recordings/session_recording_playlist.py
@@ -40,6 +40,7 @@
)
from posthog.session_recordings.session_recording_api import list_recordings_response
from posthog.utils import relative_date_parse
+from loginas.utils import is_impersonated_session
logger = structlog.get_logger(__name__)
@@ -52,6 +53,7 @@ def log_playlist_activity(
organization_id: UUIDT,
team_id: int,
user: User,
+ was_impersonated: bool,
changes: Optional[List[Change]] = None,
) -> None:
"""
@@ -66,6 +68,7 @@ def log_playlist_activity(
organization_id=organization_id,
team_id=team_id,
user=user,
+ was_impersonated=was_impersonated,
item_id=playlist_id,
scope="SessionRecordingPlaylist",
activity=activity,
@@ -125,6 +128,7 @@ def create(self, validated_data: Dict, *args, **kwargs) -> SessionRecordingPlayl
organization_id=self.context["request"].user.current_organization_id,
team_id=team.id,
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
)
return playlist
@@ -150,6 +154,7 @@ def update(self, instance: SessionRecordingPlaylist, validated_data: Dict, **kwa
organization_id=self.context["request"].user.current_organization_id,
team_id=self.context["team_id"],
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
changes=changes,
)
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--dark.png
index c4e4bf4791737..03ed3be76614d 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--light.png
index a50a7cdfe83cd..e606a27b97dc7 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--dark.png
new file mode 100644
index 0000000000000..f5331e45a4b4b
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--light.png
new file mode 100644
index 0000000000000..025a455dae129
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-404--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--dark.png
new file mode 100644
index 0000000000000..917984d79bca1
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--light.png
new file mode 100644
index 0000000000000..f91ed280d5bb8
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-configuration-empty--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--dark.png
index 49425b7762ede..e093a5f53abdd 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--light.png
index b6e6cd7d2a8d7..ed4913123192c 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--dark.png
new file mode 100644
index 0000000000000..f5331e45a4b4b
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--light.png
new file mode 100644
index 0000000000000..025a455dae129
Binary files /dev/null and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-logs-batch-export--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--dark.png
index 8da167ba5bf2d..4be88bd92af87 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--light.png
index 14d40443f7137..5b608fc07d6f2 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--dark.png
index 57691f32b92ee..8d0040451649e 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--light.png
index a34ddcc613bcb..d703740037ac5 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-app-metrics-error-modal--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--dark.png
index 103c966f5be29..5ef8980069537 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--light.png
index a79aa17f0b6a1..9f93174665f39 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-apps-management-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png
index f6e32802728a9..0c1edf3844993 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png
index 626a90dbae019..4502aa48db727 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-destinations-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--dark.png
index 76f321a927289..0e6e16ae259fa 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--light.png
index a6b29cd3f9fc4..8eb0f0246b3e6 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-filtering-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png
index ea6b9a9e97dcc..0c1edf3844993 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png
index 6514f5fa973ab..4502aa48db727 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-landing-page--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png
index d4aa288b61e3c..70cabae57dbdf 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--dark.png differ
diff --git a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png
index 5e1f9164c4104..28cda46ec0211 100644
Binary files a/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png and b/frontend/__snapshots__/scenes-app-pipeline--pipeline-transformations-page-empty--light.png differ
diff --git a/frontend/__snapshots__/scenes-app-saved-insights--empty-state--light.png b/frontend/__snapshots__/scenes-app-saved-insights--empty-state--light.png
index ac5ebc7510f3f..52ee33bc4e57d 100644
Binary files a/frontend/__snapshots__/scenes-app-saved-insights--empty-state--light.png and b/frontend/__snapshots__/scenes-app-saved-insights--empty-state--light.png differ
diff --git a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png
index 39d5318ba7312..389112f3e2c7f 100644
Binary files a/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png and b/frontend/__snapshots__/scenes-other-billing-v2--billing-unsubscribe-modal-data-pipelines--dark.png differ
diff --git a/frontend/src/layout/ErrorProjectUnavailable.tsx b/frontend/src/layout/ErrorProjectUnavailable.tsx
index 29a178c490b05..2d31f8fb5aee1 100644
--- a/frontend/src/layout/ErrorProjectUnavailable.tsx
+++ b/frontend/src/layout/ErrorProjectUnavailable.tsx
@@ -8,7 +8,7 @@ export function ErrorProjectUnavailable(): JSX.Element {
return (
-
+
{projectCreationForbiddenReason
? "Switch to a project that you have access to. If you need a new project or access to an existing one that's private, ask a team member with administrator permissions."
diff --git a/frontend/src/layout/navigation-3000/Navigation.tsx b/frontend/src/layout/navigation-3000/Navigation.tsx
index 9cfe7ed28d4a6..110c8f068fdf6 100644
--- a/frontend/src/layout/navigation-3000/Navigation.tsx
+++ b/frontend/src/layout/navigation-3000/Navigation.tsx
@@ -11,6 +11,7 @@ import { SceneConfig } from 'scenes/sceneTypes'
import { navigationLogic } from '../navigation/navigationLogic'
import { ProjectNotice } from '../navigation/ProjectNotice'
+import { Announcement } from '../navigation/TopBar/Announcement'
import { MinimalNavigation } from './components/MinimalNavigation'
import { Navbar } from './components/Navbar'
import { Sidebar } from './components/Sidebar'
@@ -46,6 +47,7 @@ export function Navigation({
{activeNavbarItem && <Sidebar key={activeNavbarItem.identifier} navbarItem={activeNavbarItem} />}
+ <Announcement />
diff --git a/frontend/src/layout/navigation/TopBar/Announcement.tsx b/frontend/src/layout/navigation/TopBar/Announcement.tsx
--- a/frontend/src/layout/navigation/TopBar/Announcement.tsx
+++ b/frontend/src/layout/navigation/TopBar/Announcement.tsx
- Welcome to PostHog's demo environment. To level up,{' '}
- <Link to="https://posthog.com/signup">
- deploy your own PostHog instance, or sign up for PostHog Cloud
- </Link>
- .
- </b>
- )
- } else if (shownAnnouncementType === AnnouncementType.AttentionRequired) {
- message = (
- <>
- <b>Attention required!</b> Your instance has uncompleted migrations that are required for the
- next release.
- <Link to="/instance/async_migrations">Click here to fix</Link>
- </>
- )
- } else if (shownAnnouncementType === AnnouncementType.CloudFlag && cloudAnnouncement) {
- message = <LemonMarkdown>{cloudAnnouncement}</LemonMarkdown>
- } else if (shownAnnouncementType === AnnouncementType.NewFeature) {
- message = <NewFeatureAnnouncement />
+ if (!showAnnouncement) {
+ return null
}
return (
- <div className="Announcement">
- {message}
- {closable && (
- <div className="Announcement__close" onClick={() => hideAnnouncement(shownAnnouncementType)}>
- <IconClose />
- </div>
- )}
- </div>
+ <LemonBanner type="info" onClose={hideAnnouncement}>
+ <LemonMarkdown>{cloudAnnouncement as string}</LemonMarkdown>
+ </LemonBanner>
)
diff --git a/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts b/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts
index 562a5ec022e59..efb278e802c37 100644
--- a/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts
+++ b/frontend/src/layout/navigation/TopBar/announcementLogic.test.ts
@@ -2,14 +2,11 @@ import { router } from 'kea-router'
import { expectLogic } from 'kea-test-utils'
import { FEATURE_FLAGS } from 'lib/constants'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
-import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
import { urls } from 'scenes/urls'
-import { userLogic } from 'scenes/userLogic'
import { initKeaTests } from '~/test/init'
-import { navigationLogic } from '../navigationLogic'
-import { announcementLogic, AnnouncementType, DEFAULT_CLOUD_ANNOUNCEMENT } from './announcementLogic'
+import { announcementLogic, DEFAULT_CLOUD_ANNOUNCEMENT } from './announcementLogic'
describe('announcementLogic', () => {
let logic: ReturnType<typeof announcementLogic.build>
@@ -18,7 +15,7 @@ describe('announcementLogic', () => {
initKeaTests()
logic = announcementLogic()
logic.mount()
- await expectLogic(logic).toMount([featureFlagLogic, preflightLogic, userLogic, navigationLogic])
+ await expectLogic(logic).toMount([featureFlagLogic])
featureFlagLogic.actions.setFeatureFlags([FEATURE_FLAGS.CLOUD_ANNOUNCEMENT], {
[FEATURE_FLAGS.CLOUD_ANNOUNCEMENT]: true,
})
@@ -28,7 +25,7 @@ describe('announcementLogic', () => {
it('shows a cloud announcement', async () => {
await expectLogic(logic).toMatchValues({
cloudAnnouncement: DEFAULT_CLOUD_ANNOUNCEMENT,
- shownAnnouncementType: AnnouncementType.CloudFlag,
+ showAnnouncement: true,
})
})
@@ -36,7 +33,7 @@ describe('announcementLogic', () => {
router.actions.push(urls.products())
await expectLogic(logic).toMatchValues({
cloudAnnouncement: DEFAULT_CLOUD_ANNOUNCEMENT,
- shownAnnouncementType: null,
+ showAnnouncement: false,
})
})
})
diff --git a/frontend/src/layout/navigation/TopBar/announcementLogic.ts b/frontend/src/layout/navigation/TopBar/announcementLogic.ts
index 593ef92032830..5a2c56b591321 100644
--- a/frontend/src/layout/navigation/TopBar/announcementLogic.ts
+++ b/frontend/src/layout/navigation/TopBar/announcementLogic.ts
@@ -1,59 +1,23 @@
import { actions, connect, kea, path, reducers, selectors } from 'kea'
import { router } from 'kea-router'
-import { FEATURE_FLAGS, OrganizationMembershipLevel } from 'lib/constants'
+import { FEATURE_FLAGS } from 'lib/constants'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import posthog from 'posthog-js'
-import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic'
-import { userLogic } from 'scenes/userLogic'
-import { navigationLogic } from '../navigationLogic'
import type { announcementLogicType } from './announcementLogicType'
-export enum AnnouncementType {
- Demo = 'Demo',
- CloudFlag = 'CloudFlag',
- NewFeature = 'NewFeature',
- AttentionRequired = 'AttentionRequired',
-}
-
export const DEFAULT_CLOUD_ANNOUNCEMENT =
"We're experiencing technical difficulties. Check [status.posthog.com](https://status.posthog.com) for updates."
-// Switch to `false` if we're not showing a feature announcement. Hard-coded because the announcement needs to be manually updated anyways.
-const ShowNewFeatureAnnouncement = false
-const ShowAttentionRequiredBanner = false
-
export const announcementLogic = kea<announcementLogicType>([
path(['layout', 'navigation', 'TopBar', 'announcementLogic']),
connect({
- values: [
- featureFlagLogic,
- ['featureFlags'],
- preflightLogic,
- ['preflight'],
- userLogic,
- ['user'],
- navigationLogic,
- ['asyncMigrationsOk'],
- ],
+ values: [featureFlagLogic, ['featureFlags']],
}),
actions({
- hideAnnouncement: (type: AnnouncementType | null) => ({ type }),
+ hideAnnouncement: true,
}),
reducers({
- persistedClosedAnnouncements: [
-            {} as Record<AnnouncementType, boolean>,
- { persist: true },
- {
- hideAnnouncement: (state, { type }) => {
- // :TRICKY: We don't close cloud announcements forever, just until reload
- if (!type || type === AnnouncementType.CloudFlag) {
- return state
- }
- return { ...state, [type]: true }
- },
- },
- ],
closed: [
false,
{
@@ -62,55 +26,18 @@ export const announcementLogic = kea<announcementLogicType>([
],
}),
selectors({
- closable: [
- (s) => [s.relevantAnnouncementType],
- // The demo announcement is persistent
- (relevantAnnouncementType): boolean => relevantAnnouncementType !== AnnouncementType.Demo,
- ],
- shownAnnouncementType: [
- (s) => [
- router.selectors.location,
- s.relevantAnnouncementType,
- s.closable,
- s.closed,
- s.persistedClosedAnnouncements,
- ],
- (
- { pathname },
- relevantAnnouncementType,
- closable,
- closed,
- persistedClosedAnnouncements
- ): AnnouncementType | null => {
+ showAnnouncement: [
+ (s) => [router.selectors.location, s.cloudAnnouncement, s.closed],
+ ({ pathname }, cloudAnnouncement, closed): boolean => {
if (
- (closable &&
- (closed ||
- (relevantAnnouncementType && persistedClosedAnnouncements[relevantAnnouncementType]))) || // hide if already closed
+ !cloudAnnouncement ||
+ closed ||
pathname.includes('/onboarding') ||
pathname.includes('/products') // hide during the onboarding phase
) {
- return null
- }
- return relevantAnnouncementType
- },
- ],
- relevantAnnouncementType: [
- (s) => [s.cloudAnnouncement, s.preflight, s.user, s.asyncMigrationsOk],
- (cloudAnnouncement, preflight, user, asyncMigrationsOk): AnnouncementType | null => {
- if (preflight?.demo) {
- return AnnouncementType.Demo
- } else if (cloudAnnouncement) {
- return AnnouncementType.CloudFlag
- } else if (
- ShowAttentionRequiredBanner &&
- !asyncMigrationsOk &&
- (user?.is_staff || (user?.organization?.membership_level ?? 0) >= OrganizationMembershipLevel.Admin)
- ) {
- return AnnouncementType.AttentionRequired
- } else if (ShowNewFeatureAnnouncement) {
- return AnnouncementType.NewFeature
+ return false
}
- return null
+ return true
},
],
cloudAnnouncement: [
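The selector rewrite collapses four inputs down to three. A minimal sketch of the same check as a pure function (names mirror the selector above; this is an illustration, not the logic itself):

```ts
// Pure-function version of the showAnnouncement selector above.
function shouldShowAnnouncement(pathname: string, cloudAnnouncement: string | null, closed: boolean): boolean {
    if (
        !cloudAnnouncement ||
        closed ||
        pathname.includes('/onboarding') ||
        pathname.includes('/products') // hide during the onboarding phase
    ) {
        return false
    }
    return true
}

// shouldShowAnnouncement('/insights', DEFAULT_CLOUD_ANNOUNCEMENT, false) === true
// shouldShowAnnouncement('/products', DEFAULT_CLOUD_ANNOUNCEMENT, false) === false
```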
diff --git a/frontend/src/lib/components/ActivityLog/__mocks__/activityLogMocks.ts b/frontend/src/lib/components/ActivityLog/__mocks__/activityLogMocks.ts
index 8cd97356fd826..360828f98bb67 100644
--- a/frontend/src/lib/components/ActivityLog/__mocks__/activityLogMocks.ts
+++ b/frontend/src/lib/components/ActivityLog/__mocks__/activityLogMocks.ts
@@ -791,8 +791,7 @@ export const insightsActivityResponseJson: ActivityLogItem[] = [
{
user: {
first_name: 'System',
- email: null,
- is_system: true,
+ email: 'system@x.com',
},
activity: 'exported for opengraph image',
scope: ActivityScope.INSIGHT,
diff --git a/frontend/src/lib/components/ActivityLog/humanizeActivity.tsx b/frontend/src/lib/components/ActivityLog/humanizeActivity.tsx
index e5a6d241847c7..f63aae1cb7233 100644
--- a/frontend/src/lib/components/ActivityLog/humanizeActivity.tsx
+++ b/frontend/src/lib/components/ActivityLog/humanizeActivity.tsx
@@ -2,7 +2,7 @@ import { dayjs } from 'lib/dayjs'
import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown'
import { fullName } from 'lib/utils'
-import { ActivityScope, InsightShortId, PersonType } from '~/types'
+import { ActivityScope, InsightShortId, PersonType, UserBasicType } from '~/types'
export interface ActivityChange {
type: ActivityScope
@@ -34,21 +34,19 @@ export interface ActivityLogDetail {
type?: string
}
-export interface ActivityUser {
- email: string | null
- first_name: string
- is_system?: boolean
-}
-
export type ActivityLogItem = {
- user?: ActivityUser
+    user?: Pick<UserBasicType, 'email' | 'first_name'>
activity: string
created_at: string
scope: ActivityScope
item_id?: string
detail: ActivityLogDetail
- unread?: boolean // when used as a notification
- is_system?: boolean // when auto-created e.g. an exported image when sharing an insight
+ /** Present if the log is used as a notification. Whether the notification is unread. */
+ unread?: boolean
+ /** Whether the activity was initiated by a PostHog staff member impersonating a user. */
+ is_staff?: boolean
+ /** Whether the activity was initiated by the PostHog backend. Example: an exported image when sharing an insight. */
+ is_system?: boolean
}
// the description of a single activity log is a sentence describing one or more changes that makes up the entry
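With `user` narrowed to a `Pick` of `UserBasicType` and the `is_staff`/`is_system` flags documented on the item itself, consumers can branch on the log item rather than sniffing the user object. A hypothetical helper sketching that usage (not part of this diff):

```ts
// Hypothetical: derive an actor label from the new item-level flags.
function describeActor(item: {
    user?: { first_name: string; email: string | null }
    is_staff?: boolean
    is_system?: boolean
}): string {
    if (item.is_system) {
        return 'PostHog' // backend-initiated, e.g. an opengraph image export
    }
    const name = item.user?.first_name ?? 'A user'
    return item.is_staff ? `${name} (PostHog staff)` : name
}
```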
diff --git a/frontend/src/lib/components/PageHeader.tsx b/frontend/src/lib/components/PageHeader.tsx
index 2be8e7f9f1393..cf041db3e4ac9 100644
--- a/frontend/src/lib/components/PageHeader.tsx
+++ b/frontend/src/lib/components/PageHeader.tsx
@@ -7,7 +7,6 @@ import { DraggableToNotebookProps } from 'scenes/notebooks/AddToNotebook/Draggab
import { breadcrumbsLogic } from '~/layout/navigation/Breadcrumbs/breadcrumbsLogic'
interface PageHeaderProps {
- title?: string | JSX.Element
caption?: string | JSX.Element | null | false
buttons?: JSX.Element | false
tabbedPage?: boolean // Whether the page has tabs for secondary navigation
diff --git a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
index eb327e49bf990..3862d81ce03dd 100644
--- a/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
+++ b/frontend/src/lib/components/PropertiesTable/PropertiesTable.tsx
@@ -1,10 +1,11 @@
import './PropertiesTable.scss'
import { IconPencil } from '@posthog/icons'
-import { LemonCheckbox, LemonInput, Link } from '@posthog/lemon-ui'
+import { LemonCheckbox, LemonInput, LemonTag, Link, Tooltip } from '@posthog/lemon-ui'
import { Dropdown, Input, Menu, Popconfirm } from 'antd'
import clsx from 'clsx'
import { useValues } from 'kea'
+import { combineUrl } from 'kea-router'
import { IconDeleteForever } from 'lib/lemon-ui/icons'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonTable, LemonTableColumns, LemonTableProps } from 'lib/lemon-ui/LemonTable'
@@ -12,9 +13,10 @@ import { KEY_MAPPING, keyMappingKeys } from 'lib/taxonomy'
import { isURL } from 'lib/utils'
import { useMemo, useState } from 'react'
import { NewProperty } from 'scenes/persons/NewProperty'
+import { urls } from 'scenes/urls'
import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel'
-import { PropertyDefinitionType } from '~/types'
+import { PropertyDefinitionType, PropertyType } from '~/types'
import { CopyToClipboardInline } from '../CopyToClipboard'
import { PropertyKeyInfo } from '../PropertyKeyInfo'
@@ -136,6 +138,16 @@ function ValueDisplay({
valueComponent
)}
{propertyType || valueType}
+ {(propertyType === PropertyType.String && valueType === 'number') ||
+ (propertyType === PropertyType.Numeric && valueType === 'string') ? (
+
+
+ Type mismatch
+
+
+ ) : null}
>
) : (
diff --git a/frontend/src/lib/introductions/NewFeatureBanner.tsx b/frontend/src/lib/introductions/NewFeatureBanner.tsx
deleted file mode 100644
index da8b63c94028d..0000000000000
--- a/frontend/src/lib/introductions/NewFeatureBanner.tsx
+++ /dev/null
@@ -1,26 +0,0 @@
-import { LemonButton } from '@posthog/lemon-ui'
-import { useValues } from 'kea'
-import { Link } from 'lib/lemon-ui/Link'
-import { billingLogic } from 'scenes/billing/billingLogic'
-
-export function NewFeatureBanner(): JSX.Element | null {
- const { upgradeLink } = useValues(billingLogic)
-
- return (
-
- 🧪 Introducing Experimentation! Test changes to your product and how they impact your
- users.
-
- Upgrade
-
-
- Learn more
-
-
- )
-}
diff --git a/frontend/src/lib/utils/eventUsageLogic.ts b/frontend/src/lib/utils/eventUsageLogic.ts
index 1649fe5a211df..009e4cdb77e1f 100644
--- a/frontend/src/lib/utils/eventUsageLogic.ts
+++ b/frontend/src/lib/utils/eventUsageLogic.ts
@@ -491,7 +491,13 @@ export const eventUsageLogic = kea<eventUsageLogicType>([
reportSurveyTemplateClicked: (template: SurveyTemplateType) => ({ template }),
reportProductUnsubscribed: (product: string) => ({ product }),
// onboarding
- reportOnboardingProductSelected: (productKey: string) => ({ productKey }),
+ reportOnboardingProductSelected: (
+ productKey: string,
+ includeFirstOnboardingProductOnUserProperties: boolean
+ ) => ({
+ productKey,
+ includeFirstOnboardingProductOnUserProperties,
+ }),
reportOnboardingCompleted: (productKey: string) => ({ productKey }),
reportSubscribedDuringOnboarding: (productKey: string) => ({ productKey }),
// command bar
@@ -1200,9 +1206,14 @@ export const eventUsageLogic = kea<eventUsageLogicType>([
})
},
// onboarding
- reportOnboardingProductSelected: ({ productKey }) => {
+ reportOnboardingProductSelected: ({ productKey, includeFirstOnboardingProductOnUserProperties }) => {
posthog.capture('onboarding product selected', {
product_key: productKey,
+ $set_once: {
+ first_onboarding_product_selected: includeFirstOnboardingProductOnUserProperties
+ ? productKey
+ : undefined,
+ },
})
},
reportOnboardingCompleted: ({ productKey }) => {
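`$set_once` only fills a person property that is currently unset, so the first onboarding product "wins", and later calls pass `undefined` and leave the profile untouched. A minimal sketch of the semantics (assuming posthog-js is initialized elsewhere):

```ts
import posthog from 'posthog-js'

// $set_once never overwrites an existing person property, so calling this
// twice with different products records only the first one.
function reportProductSelected(productKey: string, isFirstProduct: boolean): void {
    posthog.capture('onboarding product selected', {
        product_key: productKey,
        $set_once: {
            first_onboarding_product_selected: isFirstProduct ? productKey : undefined,
        },
    })
}
```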
diff --git a/frontend/src/mocks/handlers.ts b/frontend/src/mocks/handlers.ts
index d039c449c402c..5255a2015116e 100644
--- a/frontend/src/mocks/handlers.ts
+++ b/frontend/src/mocks/handlers.ts
@@ -9,7 +9,6 @@ import {
MOCK_DEFAULT_USER,
MOCK_PERSON_PROPERTIES,
MOCK_SECOND_ORGANIZATION_MEMBER,
- MOCK_TEAM_ID,
} from 'lib/api.mock'
import { getAvailableFeatures } from '~/mocks/features'
@@ -73,8 +72,7 @@ export const defaultMocks: Mocks = {
'/api/organizations/@current/plugins/repository/': [],
'/api/organizations/@current/plugins/unused/': [],
'/api/plugin_config/': toPaginatedResponse([MOCK_DEFAULT_PLUGIN_CONFIG]),
- [`/api/projects/${MOCK_TEAM_ID}/plugin_configs/${MOCK_DEFAULT_PLUGIN_CONFIG.id}/`]: MOCK_DEFAULT_PLUGIN_CONFIG,
- '/api/projects/@current/persons/properties/': toPaginatedResponse(MOCK_PERSON_PROPERTIES),
+ [`/api/projects/:team_id/plugin_configs/${MOCK_DEFAULT_PLUGIN_CONFIG.id}/`]: MOCK_DEFAULT_PLUGIN_CONFIG,
'/api/projects/:team_id/persons': EMPTY_PAGINATED_RESPONSE,
'/api/projects/:team_id/persons/properties/': toPaginatedResponse(MOCK_PERSON_PROPERTIES),
'/api/personal_api_keys/': [],
diff --git a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx
index 6c82cf18612f6..6bbe0a7f15142 100644
--- a/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx
+++ b/frontend/src/queries/nodes/InsightViz/EditorFilters.tsx
@@ -64,7 +64,7 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps):
shouldShowSessionAnalysisWarning,
hasFormula,
} = useValues(insightVizDataLogic(insightProps))
- const { isStepsFunnel } = useValues(funnelDataLogic(insightProps))
+ const { isStepsFunnel, isTrendsFunnel } = useValues(funnelDataLogic(insightProps))
if (!querySource) {
return null
@@ -72,7 +72,8 @@ export function EditorFilters({ query, showing, embedded }: EditorFiltersProps):
const hasBreakdown =
(isTrends && !NON_BREAKDOWN_DISPLAY_TYPES.includes(display || ChartDisplayType.ActionsLineGraph)) ||
- isStepsFunnel
+ isStepsFunnel ||
+ isTrendsFunnel
const hasPathsAdvanced = availableFeatures.includes(AvailableFeature.PATHS_ADVANCED)
const hasAttribution = isStepsFunnel
const hasPathsHogQL = isPaths && pathsFilter?.include_event_types?.includes(PathType.HogQL)
diff --git a/frontend/src/queries/schema.json b/frontend/src/queries/schema.json
index 313b48d202b74..568fa31af97c9 100644
--- a/frontend/src/queries/schema.json
+++ b/frontend/src/queries/schema.json
@@ -726,6 +726,9 @@
},
"type": "object"
},
+ "Day": {
+ "type": "integer"
+ },
"ElementPropertyFilter": {
"additionalProperties": false,
"description": "Sync with plugin-server/src/types.ts",
@@ -1783,7 +1786,14 @@
"additionalProperties": false,
"properties": {
"day": {
- "type": "string"
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "$ref": "#/definitions/Day"
+ }
+ ]
},
"interval": {
"description": "An interval selected out of available intervals in source query",
diff --git a/frontend/src/queries/schema.ts b/frontend/src/queries/schema.ts
index 5017bc205ac4a..450d72c33ecdb 100644
--- a/frontend/src/queries/schema.ts
+++ b/frontend/src/queries/schema.ts
@@ -839,10 +839,13 @@ export type InsightFilter =
| StickinessFilter
| LifecycleFilter
+/** @asType integer */
+export type Day = number
+
export interface InsightActorsQuery {
kind: NodeKind.InsightActorsQuery
source: InsightQuerySource
- day?: string
+ day?: string | Day
status?: string
/**
* An interval selected out of available intervals in source query
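The `@asType integer` JSDoc tag is what produces the `"type": "integer"` definition in schema.json above: `Day` stays a plain `number` for the TypeScript compiler, and only the schema generator (presumably ts-json-schema-generator, which supports this tag) treats it specially. At runtime the union still needs an ordinary `typeof` check:

```ts
/** @asType integer */
export type Day = number

// Hypothetical consumer: `day` may arrive as a date string or a day index.
function describeDay(day: string | Day): string {
    return typeof day === 'number' ? `day #${day}` : day
}
```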
diff --git a/frontend/src/scenes/actions/ActionEdit.tsx b/frontend/src/scenes/actions/ActionEdit.tsx
index fe3ddafb1cbd0..8da08b54dcca1 100644
--- a/frontend/src/scenes/actions/ActionEdit.tsx
+++ b/frontend/src/scenes/actions/ActionEdit.tsx
@@ -67,32 +67,6 @@ export function ActionEdit({ action: loadedAction, id }: ActionEditLogicProps):
- }
caption={
!showIntro && (
<>
diff --git a/frontend/src/scenes/events/Events.tsx b/frontend/src/scenes/events/Events.tsx
index 3362632799bd1..3b0218a2e3a93 100644
--- a/frontend/src/scenes/events/Events.tsx
+++ b/frontend/src/scenes/events/Events.tsx
@@ -13,7 +13,7 @@ export const scene: SceneExport = {
export function Events(): JSX.Element {
return (
<>
-
+
diff --git a/frontend/src/scenes/experiments/Experiment.tsx b/frontend/src/scenes/experiments/Experiment.tsx
index 2d0924112671d..83ec676614e2c 100644
--- a/frontend/src/scenes/experiments/Experiment.tsx
+++ b/frontend/src/scenes/experiments/Experiment.tsx
@@ -150,7 +150,7 @@ export function Experiment(): JSX.Element {
if (!hasAvailableFeature(AvailableFeature.EXPERIMENTATION)) {
return (
<>
-
+
>
)
@@ -177,7 +177,6 @@ export function Experiment(): JSX.Element {
className="space-y-4 experiment-form"
>
A/B testing }
buttons={
hasAvailableFeature(AvailableFeature.EXPERIMENTATION) ? (
diff --git a/frontend/src/scenes/feature-flags/FeatureFlag.tsx b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
index 3cee86a3e2f1c..029a54487e3b0 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlag.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlag.tsx
@@ -243,7 +243,6 @@ export function FeatureFlag({ id }: { id?: string } = {}): JSX.Element {
className="space-y-4"
>
- {featureFlag.key || 'Untitled'}
-
-
- {featureFlag.active ? (
-
- Enabled
-
- ) : (
-
- Disabled
-
- )}
-
-
- }
caption={
<>
{featureFlag.name || Description (optional) }
diff --git a/frontend/src/scenes/feature-flags/FeatureFlags.tsx b/frontend/src/scenes/feature-flags/FeatureFlags.tsx
index 58b263817f544..2fd98b4edb2eb 100644
--- a/frontend/src/scenes/feature-flags/FeatureFlags.tsx
+++ b/frontend/src/scenes/feature-flags/FeatureFlags.tsx
@@ -373,7 +373,6 @@ export function FeatureFlags(): JSX.Element {
return (
New feature flag
diff --git a/frontend/src/scenes/funnels/FunnelLineGraph.tsx b/frontend/src/scenes/funnels/FunnelLineGraph.tsx
index d5b7bf0dea8f8..56b54bb71b8f3 100644
--- a/frontend/src/scenes/funnels/FunnelLineGraph.tsx
+++ b/frontend/src/scenes/funnels/FunnelLineGraph.tsx
@@ -28,7 +28,7 @@ export function FunnelLineGraph({
showPersonsModal = true,
}: Omit): JSX.Element | null {
const { insightProps } = useValues(insightLogic)
- const { steps, aggregationTargetLabel, incompletenessOffsetFromEnd, interval, querySource, insightData } =
+ const { indexedSteps, aggregationTargetLabel, incompletenessOffsetFromEnd, interval, querySource, insightData } =
useValues(funnelDataLogic(insightProps))
if (!isInsightQueryNode(querySource)) {
@@ -42,8 +42,8 @@ export function FunnelLineGraph({
{
- if (!steps?.[0]?.days) {
+ if (!indexedSteps?.[0]?.days) {
return 'Trend'
}
return (
- getFormattedDate(steps[0].days?.[datum.dataIndex], interval ?? undefined) +
+ getFormattedDate(indexedSteps[0].days?.[datum.dataIndex], interval ?? undefined) +
' ' +
(insightData?.timezone ? shortTimeZone(insightData.timezone) : 'UTC')
)
diff --git a/frontend/src/scenes/funnels/funnelDataLogic.ts b/frontend/src/scenes/funnels/funnelDataLogic.ts
index 944ed7501434f..7838ac2b242f0 100644
--- a/frontend/src/scenes/funnels/funnelDataLogic.ts
+++ b/frontend/src/scenes/funnels/funnelDataLogic.ts
@@ -376,5 +376,10 @@ export const funnelDataLogic = kea<funnelDataLogicType>([
return !skewWarningHidden && (conversionMetrics.totalRate < 0.1 || conversionMetrics.totalRate > 0.9)
},
],
+ indexedSteps: [
+ (s) => [s.steps],
+ (steps) =>
+ Array.isArray(steps) ? steps.map((step, index) => ({ ...step, seriesIndex: index, id: index })) : [],
+ ],
})),
])
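The new `indexedSteps` selector is a pure mapping, shown here standalone with a hypothetical minimal step shape (the real one comes from `~/types`):

```ts
// Hypothetical minimal step shape for illustration.
interface FunnelStep {
    name: string
    count: number
    days?: string[]
}

type IndexedFunnelStep = FunnelStep & { seriesIndex: number; id: number }

// Mirrors the selector: tag each step with a stable index so consumers like
// FunnelLineGraph can key series on it instead of on raw array positions.
function toIndexedSteps(steps: FunnelStep[] | null): IndexedFunnelStep[] {
    return Array.isArray(steps) ? steps.map((step, index) => ({ ...step, seriesIndex: index, id: index })) : []
}
```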
diff --git a/frontend/src/scenes/groups/Group.tsx b/frontend/src/scenes/groups/Group.tsx
index 41fc57fcb6b03..869eeed842377 100644
--- a/frontend/src/scenes/groups/Group.tsx
+++ b/frontend/src/scenes/groups/Group.tsx
@@ -11,7 +11,6 @@ import { GroupDashboard } from 'scenes/groups/GroupDashboard'
import { groupLogic, GroupLogicProps } from 'scenes/groups/groupLogic'
import { RelatedGroups } from 'scenes/groups/RelatedGroups'
import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton'
-import { groupDisplayId } from 'scenes/persons/GroupActorDisplay'
import { RelatedFeatureFlags } from 'scenes/persons/RelatedFeatureFlags'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
@@ -77,7 +76,6 @@ export function Group(): JSX.Element {
return (
<>
}
buttons={
)}
setInsightMetadata({ name: value })}
- saveOnBlur={true}
- maxLength={400} // Sync with Insight model
- mode={!canEditInsight ? 'view' : undefined}
- data-attr="insight-name"
- notice={
- !canEditInsight
- ? {
- icon: ,
- tooltip:
- "You don't have edit permissions on any of the dashboards this insight belongs to. Ask a dashboard collaborator with edit access to add you.",
- }
- : undefined
- }
- />
- }
buttons={
+ formatBreakdownLabel(cohorts, formatPropertyValueForDisplay, v, breakdown, breakdown_type, isHistogram)
+ )
+ .join('::')
} else {
return ''
}
diff --git a/frontend/src/scenes/insights/utils/cleanFilters.test.ts b/frontend/src/scenes/insights/utils/cleanFilters.test.ts
index d49c8d03ff1cf..3d784d5b83857 100644
--- a/frontend/src/scenes/insights/utils/cleanFilters.test.ts
+++ b/frontend/src/scenes/insights/utils/cleanFilters.test.ts
@@ -154,14 +154,14 @@ describe('cleanFilters', () => {
expect(cleanedFilters).toHaveProperty('breakdown_type', undefined)
})
- it('does not include breakdown properties if funnel is not type steps', () => {
+ it('does not include breakdown properties if funnel is time to convert', () => {
const cleanedFilters = cleanFilters({
breakdowns: [{ property: 'any', type: 'event' }],
breakdown: 'something',
breakdown_type: 'event',
breakdown_group_type_index: 1,
insight: InsightType.FUNNELS,
- funnel_viz_type: FunnelVizType.Trends,
+ funnel_viz_type: FunnelVizType.TimeToConvert,
} as FunnelsFilterType)
expect(cleanedFilters).toHaveProperty('breakdowns', undefined)
diff --git a/frontend/src/scenes/insights/utils/cleanFilters.ts b/frontend/src/scenes/insights/utils/cleanFilters.ts
index 2d5a7c12582a5..67d934f66676f 100644
--- a/frontend/src/scenes/insights/utils/cleanFilters.ts
+++ b/frontend/src/scenes/insights/utils/cleanFilters.ts
@@ -72,8 +72,9 @@ function cleanBreakdownNormalizeURL(
const cleanBreakdownParams = (cleanedParams: Partial, filters: Partial): void => {
const isStepsFunnel = isFunnelsFilter(filters) && filters.funnel_viz_type === FunnelVizType.Steps
+ const isTrendsFunnel = isFunnelsFilter(filters) && filters.funnel_viz_type === FunnelVizType.Trends
const isTrends = isTrendsFilter(filters)
- const canBreakdown = isStepsFunnel || isTrends
+ const canBreakdown = isStepsFunnel || isTrendsFunnel || isTrends
const canMultiPropertyBreakdown = isStepsFunnel
diff --git a/frontend/src/scenes/instance/AsyncMigrations/AsyncMigrations.tsx b/frontend/src/scenes/instance/AsyncMigrations/AsyncMigrations.tsx
index 9b87dee4914a5..87728077e8a67 100644
--- a/frontend/src/scenes/instance/AsyncMigrations/AsyncMigrations.tsx
+++ b/frontend/src/scenes/instance/AsyncMigrations/AsyncMigrations.tsx
@@ -266,7 +266,6 @@ export function AsyncMigrations(): JSX.Element {
{user?.is_staff ? (
<>
Manage async migrations in your instance.
@@ -321,7 +320,6 @@ export function AsyncMigrations(): JSX.Element {
>
) : (
diff --git a/frontend/src/scenes/instance/DeadLetterQueue/DeadLetterQueue.tsx b/frontend/src/scenes/instance/DeadLetterQueue/DeadLetterQueue.tsx
index 6b8a6933c39f6..21974b6a3b21c 100644
--- a/frontend/src/scenes/instance/DeadLetterQueue/DeadLetterQueue.tsx
+++ b/frontend/src/scenes/instance/DeadLetterQueue/DeadLetterQueue.tsx
@@ -20,7 +20,6 @@ export function DeadLetterQueue(): JSX.Element {
if (!user?.is_staff) {
return (
@@ -40,7 +39,6 @@ export function DeadLetterQueue(): JSX.Element {
return (
Manage your instance's dead letter queue.
diff --git a/frontend/src/scenes/instance/SystemStatus/index.tsx b/frontend/src/scenes/instance/SystemStatus/index.tsx
index 7c51fff823e94..0df19f8e9c2a3 100644
--- a/frontend/src/scenes/instance/SystemStatus/index.tsx
+++ b/frontend/src/scenes/instance/SystemStatus/index.tsx
@@ -89,7 +89,6 @@ export function SystemStatus(): JSX.Element {
return (
Here you can find all the critical runtime details and settings of your PostHog instance. You
diff --git a/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx
index b3613c7ed1ade..08b5fbe1169ef 100644
--- a/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx
+++ b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx
@@ -29,7 +29,6 @@ export function NotebookCanvas(): JSX.Element {
return (
<>
- Notebooks
-
- Beta
-
-
- }
buttons={
<>
([
members: {
__default: [] as OrganizationMemberType[],
loadMembers: async () => {
- return (await api.get('api/organizations/@current/members/?limit=200')).results
+ return (await api.get('api/organizations/@current/members/?limit=250')).results
},
removeMember: async (member: OrganizationMemberType) => {
await api.delete(`api/organizations/@current/members/${member.user.uuid}/`)
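Raising `limit` from 200 to 250 buys headroom but still caps out; the durable alternative is to follow the paginated response's `next` cursor. A hedged sketch against the generic `{ results, next }` envelope (the `fetchJson` parameter stands in for the api client):

```ts
interface PaginatedResponse<T> {
    results: T[]
    next: string | null
}

// Exhausts a paginated endpoint by walking `next` links instead of relying
// on a single large `limit`. `fetchJson` is an injected stand-in for api.get.
async function loadAll<T>(
    firstUrl: string,
    fetchJson: (url: string) => Promise<PaginatedResponse<T>>
): Promise<T[]> {
    const all: T[] = []
    let url: string | null = firstUrl
    while (url) {
        const page: PaginatedResponse<T> = await fetchJson(url)
        all.push(...page.results)
        url = page.next
    }
    return all
}
```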
diff --git a/frontend/src/scenes/persons-management/PersonsManagementScene.tsx b/frontend/src/scenes/persons-management/PersonsManagementScene.tsx
index 87e1f318f7216..3300b21824954 100644
--- a/frontend/src/scenes/persons-management/PersonsManagementScene.tsx
+++ b/frontend/src/scenes/persons-management/PersonsManagementScene.tsx
@@ -21,7 +21,6 @@ export function PersonsManagementScene(): JSX.Element {
return (
<>
}
caption={ }
notebookProps={
url
diff --git a/frontend/src/scenes/pipeline/AppMetrics.tsx b/frontend/src/scenes/pipeline/AppMetrics.tsx
index 9b5686c0ed0a0..f8b52cea9640d 100644
--- a/frontend/src/scenes/pipeline/AppMetrics.tsx
+++ b/frontend/src/scenes/pipeline/AppMetrics.tsx
@@ -298,7 +298,8 @@ function ErrorDetailsModal({ pluginConfigId }: { pluginConfigId: number }): JSX.
// eslint-disable-next-line react/forbid-dom-props
- When:
+ When: {' '}
+
{activeErrorDetails.error_details.eventCount && (
diff --git a/frontend/src/scenes/pipeline/AppsManagement.tsx b/frontend/src/scenes/pipeline/AppsManagement.tsx
index 9a570a3718bca..bdb4741f576d0 100644
--- a/frontend/src/scenes/pipeline/AppsManagement.tsx
+++ b/frontend/src/scenes/pipeline/AppsManagement.tsx
@@ -1,7 +1,7 @@
-import { LemonBanner, LemonDivider, LemonMenu, LemonTable, Tooltip } from '@posthog/lemon-ui'
+import { LemonBanner, LemonDivider, LemonMenu, LemonTable, LemonTag, Tooltip } from '@posthog/lemon-ui'
import { Popconfirm } from 'antd'
import { useActions, useValues } from 'kea'
-import { IconDelete, IconLock, IconLockOpen } from 'lib/lemon-ui/icons'
+import { IconCloudDownload, IconDelete, IconLock, IconLockOpen, IconRefresh } from 'lib/lemon-ui/icons'
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { LemonInput } from 'lib/lemon-ui/LemonInput'
import { Link } from 'lib/lemon-ui/Link'
@@ -21,6 +21,11 @@ export const scene: SceneExport = {
}
export function AppsManagement(): JSX.Element {
+ // NOTE: We don't want to unmount appsManagementLogic once it's mounted. This is a memoization technique for
+ // `checkForUpdates`, as otherwise leaving the page and coming back to it would result in us checking for updates
+ // each time. Normally such a hack is a bit of a smell, but this is a staff-only page, so totally fine.
+ appsManagementLogic.mount()
+
const {
canInstallPlugins,
canGloballyManagePlugins,
@@ -50,6 +55,7 @@ export function AppsManagement(): JSX.Element {
Installed apps
+            <AppsToUpdate />
{globalPlugins && (
<>
Global apps
@@ -73,9 +79,39 @@ type RenderAppsTable = {
plugins: PluginType[]
}
+function AppsToUpdate(): JSX.Element {
+    const { updatablePlugins, pluginsNeedingUpdates, checkingForUpdates } = useValues(appsManagementLogic)
+    const { checkForUpdates } = useActions(appsManagementLogic)
+
+    return (
+        <>
+            {updatablePlugins && (
+                <LemonButton
+                    icon={<IconRefresh />}
+                    onClick={checkForUpdates}
+                    loading={checkingForUpdates}
+                >
+                    {checkingForUpdates
+                        ? `Checking ${Object.keys(updatablePlugins).length} apps for updates`
+                        : // by default we already check all apps for updates on initial load
+                          'Check again for updates'}
+                </LemonButton>
+            )}
+            {pluginsNeedingUpdates.length > 0 && (
+                <>
+                    <h2>Apps to update</h2>
+                    <p>These apps have newer commits in the repository they link to.</p>
+                    <AppsTable plugins={pluginsNeedingUpdates} />
+                </>
+            )}
+        </>
+    )
+}
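The manual `appsManagementLogic.mount()` above works because kea mounts are reference-counted: a logic stays mounted while any reference remains, and a `mount()` whose unmount handle is discarded pins it for the app's lifetime. A hypothetical toy logic demonstrating the effect:

```ts
import { actions, kea, reducers } from 'kea'

// Hypothetical logic, for demonstration only.
const counterLogic = kea([
    actions({ increment: true }),
    reducers({ count: [0, { increment: (state: number) => state + 1 }] }),
])

counterLogic.mount() // the returned unmount handle is deliberately dropped
counterLogic.actions.increment()
// Any component later calling useValues(counterLogic) sees count === 1,
// even if every previous React subscriber has unmounted in between.
```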
+
function AppsTable({ plugins }: RenderAppsTable): JSX.Element {
const { unusedPlugins } = useValues(appsManagementLogic)
- const { uninstallPlugin, patchPlugin } = useActions(appsManagementLogic)
+ const { uninstallPlugin, patchPlugin, updatePlugin } = useActions(appsManagementLogic)
const data = plugins.map((plugin) => ({ ...plugin, key: plugin.id }))
return (
@@ -96,6 +132,13 @@ function AppsTable({ plugins }: RenderAppsTable): JSX.Element {
<>
{plugin.name}
+ {plugin.latest_tag && plugin.tag && plugin.latest_tag !== plugin.tag && (
+
+ See update diff
+
+ )}
{plugin.description}
>
@@ -127,6 +170,16 @@ function AppsTable({ plugins }: RenderAppsTable): JSX.Element {
render: function RenderAccess(_, plugin) {
return (
+                    {plugin.latest_tag && plugin.tag != plugin.latest_tag && (
+                        <LemonButton
+                            icon={<IconCloudDownload />}
+                            onClick={() => updatePlugin(plugin.id)}
+                        >
+                            Update
+                        </LemonButton>
+                    )}
{plugin.is_global ? (
>
diff --git a/frontend/src/scenes/pipeline/Destinations.tsx b/frontend/src/scenes/pipeline/Destinations.tsx
+        return <p>Pipeline 3000 not available yet</p>
}
const { enabledPluginConfigs, disabledPluginConfigs, shouldShowProductIntroduction } =
useValues(pipelineDestinationsLogic)
@@ -43,57 +42,40 @@ export function Destinations(): JSX.Element {
productKey={ProductKey.PIPELINE_DESTINATIONS}
description="Pipeline destinations allow you to export data outside of PostHog, such as webhooks to Slack."
docsURL="https://posthog.com/docs/cdp"
-                    actionElementOverride={<NewButton tab={PipelineTabs.Destinations} />}
+                    actionElementOverride={<NewButton kind={PipelineAppKind.Destination} />}
isEmpty={true}
/>
)}
-            <AppsTable />
-            <BatchExportsTable />
+            <DestinationsTable />
>
)
}
-function BatchExportsTable(): JSX.Element {
- return (
- <>
- Batch exports
-
- Backfills
- >
- )
-}
-
-function AppsTable(): JSX.Element {
- const { loading, displayablePluginConfigs, enabledPluginConfigs, disabledPluginConfigs, canConfigurePlugins } =
- useValues(pipelineDestinationsLogic)
+function DestinationsTable(): JSX.Element {
+ const { loading, destinations, canConfigurePlugins } = useValues(pipelineDestinationsLogic)
const { toggleEnabled, loadPluginConfigs } = useActions(pipelineDestinationsLogic)
- if (enabledPluginConfigs.length === 0 && disabledPluginConfigs.length === 0) {
-        return <></>
- }
-
return (
<>
- Webhooks
-
- {pluginConfig.name}
+
+ {destination.name}
- {pluginConfig.description && (
+ {destination.description && (
- {pluginConfig.description}
+ {destination.description}
)}
>
@@ -102,53 +84,71 @@ function AppsTable(): JSX.Element {
},
{
title: 'App',
- render: function RenderAppInfo(_, pluginConfig) {
- return
+ render: function RenderAppInfo(_, destination) {
+ if (destination.backend === 'plugin') {
+ return
+ }
+                return <></> // TODO: batch export
+ },
+ },
+ {
+ title: 'Frequency',
+ render: function RenderFrequency(_, destination) {
+ return destination.frequency
},
},
{
- title: '24h',
- render: function Render24hDeliveryRate(_, pluginConfig) {
- let tooltip = 'No events exported in the past 24 hours'
- let value = '-'
- let tagType: LemonTagType = 'muted'
- if (
- pluginConfig.delivery_rate_24h !== null &&
- pluginConfig.delivery_rate_24h !== undefined
- ) {
- const deliveryRate = pluginConfig.delivery_rate_24h
- value = `${Math.floor(deliveryRate * 100)}%`
- tooltip = 'Success rate for past 24 hours'
- if (deliveryRate >= 0.99) {
- tagType = 'success'
- } else if (deliveryRate >= 0.75) {
- tagType = 'warning'
- } else {
- tagType = 'danger'
+                    title: '24h', // TODO: make this selectable between 24h and 7d
+ render: function Render24hDeliveryRate(_, destination) {
+ if (destination.backend === 'plugin') {
+ let tooltip = 'No events exported in the past 24 hours'
+ let value = '-'
+ let tagType: LemonTagType = 'muted'
+ const deliveryRate = destination.success_rates['24h']
+ if (deliveryRate !== null) {
+ value = `${Math.floor(deliveryRate * 100)}%`
+ tooltip = 'Success rate for past 24 hours'
+ if (deliveryRate >= 0.99) {
+ tagType = 'success'
+ } else if (deliveryRate >= 0.75) {
+ tagType = 'warning'
+ } else {
+ tagType = 'danger'
+ }
}
+                        return (
+                            <Tooltip title={tooltip}>
+                                <LemonTag type={tagType}>
+                                    {value}
+                                </LemonTag>
+                            </Tooltip>
+                        )
+                    } else {
+                        // Batch exports // TODO: fix this
+                        const tooltip = 'No events exported in the past 24 hours'
+                        return (
+                            <Tooltip title={tooltip}>
+                                <LemonTag type="muted">
+                                    {'-'}
+                                </LemonTag>
+                            </Tooltip>
+                        )
                    }
-                    return (
-                        <Tooltip title={tooltip}>
-                            <LemonTag type={tagType}>
-                                {value}
-                            </LemonTag>
-                        </Tooltip>
-                    )
},
},
- updatedAtColumn() as LemonTableColumn,
+ updatedAtColumn() as LemonTableColumn,
{
title: 'Status',
- render: function RenderStatus(_, pluginConfig) {
+ render: function RenderStatus(_, destination) {
return (
<>
- {pluginConfig.enabled ? (
+ {destination.enabled ? (
- Enabled
+ Active
) : (
- Disabled
+ Paused
)}
>
@@ -157,81 +157,79 @@ function AppsTable(): JSX.Element {
},
{
width: 0,
- render: function Render(_, pluginConfig) {
+ render: function Render(_, destination) {
return (
{
- toggleEnabled({
- enabled: !pluginConfig.enabled,
- id: pluginConfig.id,
- })
- }}
- id={`app-${pluginConfig.id}-enable-switch`}
+ onClick={() => toggleEnabled(destination, !destination.enabled)}
+ id={`app-${destination.id}-enable-switch`}
disabledReason={
canConfigurePlugins
? undefined
- : 'You do not have permission to enable/disable apps.'
+ : 'You do not have permission to enable/disable destinations.'
}
fullWidth
>
- {pluginConfig.enabled ? 'Disable' : 'Enable'} app
-
-
- {canConfigurePlugins ? 'Edit' : 'View'} app configuration
+ {destination.enabled ? 'Pause' : 'Unpause'} destination
- View app metrics
+ {canConfigurePlugins ? 'Edit' : 'View'} destination configuration
- View app logs
+ View metrics
- View app source code
+ View logs
+ {destination.app_source_code_url && (
+
+ View app source code
+
+ )}
- {
- void deleteWithUndo({
- endpoint: `plugin_config`,
- object: {
- id: pluginConfig.id,
- name: pluginConfig.name,
- },
- callback: loadPluginConfigs,
- })
- }}
- id="app-reorder"
- disabledReason={
- canConfigurePlugins
- ? undefined
- : 'You do not have permission to delete apps.'
- }
- fullWidth
- >
- Delete app
-
+ {destination.backend === 'plugin' && (
+ {
+ void deleteWithUndo({
+ endpoint: `plugin_config`,
+ object: {
+ id: destination.id,
+ name: destination.name,
+ },
+ callback: loadPluginConfigs,
+ })
+ }}
+ id="app-delete"
+ disabledReason={
+ canConfigurePlugins
+ ? undefined
+ : 'You do not have permission to delete apps.'
+ }
+ fullWidth
+ >
+ Delete app
+
+ )}
>
}
/>
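The table narrows on `destination.backend`, which only typechecks if destinations form a discriminated union. A sketch of the shape this diff implies (field names beyond the ones used above are assumptions):

```ts
// Assumed union, inferred from the render functions above.
interface PluginDestination {
    backend: 'plugin'
    id: number
    name: string
    enabled: boolean
    frequency: string
    success_rates: { '24h': number | null }
}

interface BatchExportDestination {
    backend: 'batch_export'
    id: string
    name: string
    enabled: boolean
    frequency: string
}

type Destination = PluginDestination | BatchExportDestination

function deliveryRate24h(destination: Destination): number | null {
    if (destination.backend === 'plugin') {
        // Narrowed to PluginDestination here.
        return destination.success_rates['24h']
    }
    return null // batch export rates are still a TODO in the diff
}
```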
diff --git a/frontend/src/scenes/pipeline/NewButton.tsx b/frontend/src/scenes/pipeline/NewButton.tsx
index ec9ed6ab11d09..4b0a65e8dcb22 100644
--- a/frontend/src/scenes/pipeline/NewButton.tsx
+++ b/frontend/src/scenes/pipeline/NewButton.tsx
@@ -1,19 +1,20 @@
import { LemonButton } from 'lib/lemon-ui/LemonButton'
import { urls } from 'scenes/urls'
-import { PipelineTabs } from '~/types'
-
-import { singularName } from './pipelineLogic'
+import { PipelineAppKind, PipelineAppTab } from '~/types'
type NewButtonProps = {
- tab: PipelineTabs
+ kind: PipelineAppKind
}
-export function NewButton({ tab }: NewButtonProps): JSX.Element {
- const singular = singularName(tab)
+export function NewButton({ kind }: NewButtonProps): JSX.Element {
return (
-
- New {singular}
+
+ New {kind}
)
}
diff --git a/frontend/src/scenes/pipeline/Pipeline.stories.tsx b/frontend/src/scenes/pipeline/Pipeline.stories.tsx
index e7f16446ccc03..05be1e9a0d534 100644
--- a/frontend/src/scenes/pipeline/Pipeline.stories.tsx
+++ b/frontend/src/scenes/pipeline/Pipeline.stories.tsx
@@ -6,8 +6,11 @@ import { App } from 'scenes/App'
import { urls } from 'scenes/urls'
import { mswDecorator, useStorybookMocks } from '~/mocks/browser'
-import { PipelineAppTabs, PipelineTabs } from '~/types'
+import { PipelineAppKind, PipelineAppTab, PipelineTab } from '~/types'
+import batchExports from './__mocks__/batchExports.json'
+import pluginConfigs from './__mocks__/pluginConfigs.json'
+import plugins from './__mocks__/plugins.json'
import { appMetricsLogic } from './appMetricsLogic'
import { appsManagementLogic } from './appsManagementLogic'
import { pipelineLogic } from './pipelineLogic'
@@ -18,9 +21,14 @@ export default {
// mocks used by all stories in this file
mswDecorator({
get: {
- 'api/organizations/@current/pipeline_transformations/': {},
- 'api/organizations/@current/plugins/': {},
- 'api/projects/:team_id/pipeline_transformations_configs/': {},
+ '/api/projects/:team_id/batch_exports/': batchExports,
+ '/api/organizations/:organization_id/batch_exports/': batchExports,
+ '/api/organizations/@current/plugins/': plugins,
+ '/api/organizations/@current/pipeline_transformations/': plugins,
+ '/api/projects/:team_id/pipeline_transformation_configs/': pluginConfigs,
+ // TODO: Differentiate between transformation and destination mocks for nicer mocks
+ '/api/organizations/@current/pipeline_destinations/': plugins,
+ '/api/projects/:team_id/pipeline_destination_configs/': pluginConfigs,
},
}),
],
@@ -33,6 +41,13 @@ export default {
}, // scene mode
} as Meta
+const eventSequenceTimerPluginConfigId = pluginConfigs.results.find(
+ (conf) => conf.plugin === plugins.results.find((plugin) => plugin.name === 'Event Sequence Timer Plugin')!.id
+)!.id
+const geoIpConfigId = pluginConfigs.results.find(
+ (conf) => conf.plugin === plugins.results.find((plugin) => plugin.name === 'GeoIP')!.id
+)!.id
+
export function PipelineLandingPage(): JSX.Element {
// also Destinations page
useEffect(() => {
@@ -41,9 +56,10 @@ export function PipelineLandingPage(): JSX.Element {
}, [])
    return <App />
}
+
export function PipelineFilteringPage(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.pipeline(PipelineTabs.Filters))
+ router.actions.push(urls.pipeline(PipelineTab.Filters))
pipelineLogic.mount()
}, [])
    return <App />
@@ -51,34 +67,23 @@ export function PipelineFilteringPage(): JSX.Element {
export function PipelineTransformationsPageEmpty(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.pipeline(PipelineTabs.Transformations))
+ router.actions.push(urls.pipeline(PipelineTab.Transformations))
pipelineLogic.mount()
}, [])
    return <App />
}
export function PipelineTransformationsPage(): JSX.Element {
- useStorybookMocks({
- get: {
- 'api/organizations/@current/pipeline_transformations/': require('./__mocks__/plugins.json'),
- 'api/projects/:team_id/pipeline_transformations_configs/': require('./__mocks__/transformationPluginConfigs.json'),
- },
- })
useEffect(() => {
- router.actions.push(urls.pipeline(PipelineTabs.Transformations))
+ router.actions.push(urls.pipeline(PipelineTab.Transformations))
pipelineLogic.mount()
}, [])
    return <App />
}
+
export function PipelineDestinationsPage(): JSX.Element {
- useStorybookMocks({
- get: {
- 'api/organizations/@current/pipeline_destinations/': require('./__mocks__/plugins.json'),
- 'api/projects/:team_id/pipeline_destinations_configs/': require('./__mocks__/transformationPluginConfigs.json'),
- },
- })
useEffect(() => {
- router.actions.push(urls.pipeline(PipelineTabs.Destinations))
+ router.actions.push(urls.pipeline(PipelineTab.Destinations))
pipelineLogic.mount()
}, [])
    return <App />
@@ -86,7 +91,27 @@ export function PipelineDestinationsPage(): JSX.Element {
export function PipelineAppConfiguration(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.pipelineApp(1, PipelineAppTabs.Configuration))
+ router.actions.push(
+ urls.pipelineApp(
+ PipelineAppKind.Destination,
+ eventSequenceTimerPluginConfigId,
+ PipelineAppTab.Configuration
+ )
+ )
+ }, [])
+    return <App />
+}
+
+export function PipelineAppConfigurationEmpty(): JSX.Element {
+ useEffect(() => {
+ router.actions.push(urls.pipelineApp(PipelineAppKind.Destination, geoIpConfigId, PipelineAppTab.Configuration))
+ }, [])
+    return <App />
+}
+
+export function PipelineAppConfiguration404(): JSX.Element {
+ useEffect(() => {
+ router.actions.push(urls.pipelineApp(PipelineAppKind.Destination, 4239084923809, PipelineAppTab.Configuration))
}, [])
    return <App />
}
@@ -94,13 +119,13 @@ export function PipelineAppConfiguration(): JSX.Element {
export function PipelineAppMetrics(): JSX.Element {
useStorybookMocks({
get: {
- 'api/projects/:team_id/app_metrics/4?date_from=-7d': require('./__mocks__/pluginMetrics.json'),
- 'api/projects/:team_id/app_metrics/4/error_details?error_type=Error': require('./__mocks__/pluginErrorDetails.json'),
+ '/api/projects/:team_id/app_metrics/:plugin_config_id?date_from=-7d': require('./__mocks__/pluginMetrics.json'),
+ '/api/projects/:team_id/app_metrics/:plugin_config_id/error_details?error_type=Error': require('./__mocks__/pluginErrorDetails.json'),
},
})
useEffect(() => {
- router.actions.push(urls.pipelineApp(4, PipelineAppTabs.Metrics))
- appMetricsLogic({ pluginConfigId: 4 }).mount()
+ router.actions.push(urls.pipelineApp(PipelineAppKind.Destination, geoIpConfigId, PipelineAppTab.Metrics))
+ appMetricsLogic({ pluginConfigId: geoIpConfigId }).mount()
}, [])
    return <App />
}
@@ -108,13 +133,13 @@ export function PipelineAppMetrics(): JSX.Element {
export function PipelineAppMetricsErrorModal(): JSX.Element {
useStorybookMocks({
get: {
- 'api/projects/:team_id/app_metrics/4?date_from=-7d': require('./__mocks__/pluginMetrics.json'),
- 'api/projects/:team_id/app_metrics/4/error_details?error_type=Error': require('./__mocks__/pluginErrorDetails.json'),
+ '/api/projects/:team_id/app_metrics/:plugin_config_id?date_from=-7d': require('./__mocks__/pluginMetrics.json'),
+ '/api/projects/:team_id/app_metrics/:plugin_config_id/error_details?error_type=Error': require('./__mocks__/pluginErrorDetails.json'),
},
})
useEffect(() => {
- router.actions.push(urls.pipelineApp(4, PipelineAppTabs.Metrics))
- const logic = appMetricsLogic({ pluginConfigId: 4 })
+ router.actions.push(urls.pipelineApp(PipelineAppKind.Destination, geoIpConfigId, PipelineAppTab.Metrics))
+ const logic = appMetricsLogic({ pluginConfigId: geoIpConfigId })
logic.mount()
logic.actions.openErrorDetailsModal('Error')
}, [])
@@ -124,24 +149,32 @@ export function PipelineAppMetricsErrorModal(): JSX.Element {
export function PipelineAppLogs(): JSX.Element {
useStorybookMocks({
get: {
- 'api/projects/:team_id/plugin_configs/1/logs': require('./__mocks__/pluginLogs.json'),
+ '/api/projects/:team_id/plugin_configs/:plugin_config_id/logs': require('./__mocks__/pluginLogs.json'),
},
})
useEffect(() => {
- router.actions.push(urls.pipelineApp(1, PipelineAppTabs.Logs))
+ router.actions.push(urls.pipelineApp(PipelineAppKind.Destination, geoIpConfigId, PipelineAppTab.Logs))
}, [])
    return <App />
}
-export function PipelineAppsManagementPage(): JSX.Element {
+export function PipelineAppLogsBatchExport(): JSX.Element {
useStorybookMocks({
get: {
- 'api/organizations/@current/plugins/': require('./__mocks__/plugins.json'),
+ '/api/projects/:team_id/batch_exports/:export_id/logs': require('./__mocks__/batchExportLogs.json'),
},
})
+ useEffect(() => {
+ router.actions.push(
+ urls.pipelineApp(PipelineAppKind.Destination, batchExports.results[0].id, PipelineAppTab.Logs)
+ )
+ }, [])
+    return <App />
+}
+export function PipelineAppsManagementPage(): JSX.Element {
useEffect(() => {
- router.actions.push(urls.pipeline(PipelineTabs.AppsManagement))
+ router.actions.push(urls.pipeline(PipelineTab.AppsManagement))
appsManagementLogic.mount()
}, [])
    return <App />
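Switching the story mocks from hardcoded ids (`app_metrics/4`) to path parameters means one handler serves whatever config id a story navigates to. A minimal msw-style sketch of such a parameterized handler (msw v1 API; the payload is illustrative):

```ts
import { rest } from 'msw'

// `:plugin_config_id` matches 4, geoIpConfigId, or any other id a story uses.
export const appMetricsHandler = rest.get(
    '/api/projects/:team_id/app_metrics/:plugin_config_id',
    (req, res, ctx) => {
        const { plugin_config_id } = req.params
        return res(ctx.json({ plugin_config_id, metrics: [] })) // illustrative payload
    }
)
```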
diff --git a/frontend/src/scenes/pipeline/Pipeline.tsx b/frontend/src/scenes/pipeline/Pipeline.tsx
index 0a60c8af63bef..f131df44923d3 100644
--- a/frontend/src/scenes/pipeline/Pipeline.tsx
+++ b/frontend/src/scenes/pipeline/Pipeline.tsx
@@ -5,39 +5,41 @@ import { LemonTabs } from 'lib/lemon-ui/LemonTabs'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
-import { PipelineTabs } from '~/types'
+import { PipelineTab } from '~/types'
import { AppsManagement } from './AppsManagement'
import { Destinations } from './Destinations'
import { NewButton } from './NewButton'
+import { PIPELINE_TAB_TO_APP_KIND } from './PipelineApp'
import { humanFriendlyTabName, pipelineLogic } from './pipelineLogic'
import { Transformations } from './Transformations'
export function Pipeline(): JSX.Element {
const { currentTab } = useValues(pipelineLogic)
-    const tab_to_content: Record<PipelineTabs, JSX.Element> = {
-        [PipelineTabs.Filters]: <div>Coming soon</div>,
-        [PipelineTabs.Transformations]: <Transformations />,
-        [PipelineTabs.Destinations]: <Destinations />,
-        [PipelineTabs.AppsManagement]: <AppsManagement />,
+    const tabToContent: Record<PipelineTab, JSX.Element> = {
+        [PipelineTab.Filters]: <div>Coming soon</div>,
+        [PipelineTab.Transformations]: <Transformations />,
+        [PipelineTab.Destinations]: <Destinations />,
+        [PipelineTab.AppsManagement]: <AppsManagement />,
}
+ const maybeKind = PIPELINE_TAB_TO_APP_KIND[currentTab]
+
return (
}
+                buttons={maybeKind ? <NewButton kind={maybeKind} /> : undefined}
/>
-                onChange={(tab) => router.actions.push(urls.pipeline(tab as PipelineTabs))}
- tabs={Object.values(PipelineTabs).map((tab) => ({
+ onChange={(tab) => router.actions.push(urls.pipeline(tab as PipelineTab))}
+ tabs={Object.values(PipelineTab).map((tab) => ({
// TODO: Hide admin management based on `canGloballyManagePlugins` permission
label: humanFriendlyTabName(tab),
key: tab,
- content: tab_to_content[tab],
+ content: tabToContent[tab],
}))}
/>
diff --git a/frontend/src/scenes/pipeline/PipelineApp.tsx b/frontend/src/scenes/pipeline/PipelineApp.tsx
index b3b1ddd3fac33..932b48f44ccd7 100644
--- a/frontend/src/scenes/pipeline/PipelineApp.tsx
+++ b/frontend/src/scenes/pipeline/PipelineApp.tsx
@@ -1,56 +1,86 @@
-import { Spinner } from '@posthog/lemon-ui'
import { useValues } from 'kea'
-import { router } from 'kea-router'
+import { NotFound } from 'lib/components/NotFound'
import { PageHeader } from 'lib/components/PageHeader'
import { FEATURE_FLAGS } from 'lib/constants'
-import { LemonTabs } from 'lib/lemon-ui/LemonTabs/LemonTabs'
+import { LemonTab, LemonTabs } from 'lib/lemon-ui/LemonTabs/LemonTabs'
import { featureFlagLogic } from 'lib/logic/featureFlagLogic'
import { capitalizeFirstLetter } from 'lib/utils'
-import { PluginLogs } from 'scenes/plugins/plugin/PluginLogs'
+import { PipelineAppLogs } from 'scenes/pipeline/PipelineAppLogs'
import { SceneExport } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
-import { PipelineAppTabs } from '~/types'
+import { PipelineAppKind, PipelineAppTab, PipelineTab } from '~/types'
import { AppMetrics } from './AppMetrics'
-import { pipelineAppLogic } from './pipelineAppLogic'
+import { PipelineAppConfiguration } from './PipelineAppConfiguration'
+import { pipelineAppLogic, PipelineAppLogicProps } from './pipelineAppLogic'
+
+export const PIPELINE_TAB_TO_APP_KIND: Partial<Record<PipelineTab, PipelineAppKind>> = {
+ [PipelineTab.Filters]: PipelineAppKind.Filter,
+ [PipelineTab.Transformations]: PipelineAppKind.Transformation,
+ [PipelineTab.Destinations]: PipelineAppKind.Destination,
+}
+
+const paramsToProps = ({
+ params: { kindTab, id },
+}: {
+ params: { kindTab?: string; id?: string }
+}): PipelineAppLogicProps => {
+ const numericId = id && /^\d+$/.test(id) ? parseInt(id) : undefined
+ if (!kindTab || !id) {
+ throw new Error('Loaded the PipelineApp without either `kindTab` or `id` passed in')
+ }
+
+ return {
+ kind: PIPELINE_TAB_TO_APP_KIND[kindTab] || null,
+ id: numericId && !isNaN(numericId) ? numericId : id,
+ }
+}
export const scene: SceneExport = {
component: PipelineApp,
logic: pipelineAppLogic,
- paramsToProps: ({ params: { id } }: { params: { id?: string } }) => ({ id: id ? parseInt(id) : 'new' }),
+ paramsToProps,
}
-export function PipelineApp({ id }: { id?: string } = {}): JSX.Element {
+export function PipelineApp(params: { kindTab?: string; id?: string } = {}): JSX.Element {
+ const { kind, id } = paramsToProps({ params })
+
+ const { currentTab, loading, maybePlugin } = useValues(pipelineAppLogic)
const { featureFlags } = useValues(featureFlagLogic)
+
if (!featureFlags[FEATURE_FLAGS.PIPELINE_UI]) {
-        return <></>
+        return <p>Pipeline 3000 not available yet</p>
}
- const { currentTab } = useValues(pipelineAppLogic)
- const confId = id ? parseInt(id) : undefined
+ if (!kind) {
+ return
+ }
- if (!confId) {
-        return <Spinner />
+ if (!loading && !maybePlugin) {
+ return
}
-    const tab_to_content: Record<PipelineAppTabs, JSX.Element> = {
-        [PipelineAppTabs.Configuration]: <div>Configuration editing</div>,
-        [PipelineAppTabs.Metrics]: <AppMetrics pluginConfigId={confId} />,
-        [PipelineAppTabs.Logs]: <PluginLogs pluginConfigId={confId} />,
+    const tabToContent: Record<PipelineAppTab, JSX.Element> = {
+        [PipelineAppTab.Configuration]: <PipelineAppConfiguration />,
+        [PipelineAppTab.Metrics]: <AppMetrics pluginConfigId={id as number} />,
+        [PipelineAppTab.Logs]: <PipelineAppLogs id={id} kind={kind} />,
}
return (
-
+
-                onChange={(tab) => router.actions.push(urls.pipelineApp(confId, tab as PipelineAppTabs))}
- tabs={Object.values(PipelineAppTabs).map((tab) => ({
- label: capitalizeFirstLetter(tab),
- key: tab,
- content: tab_to_content[tab],
- }))}
+ tabs={Object.values(PipelineAppTab).map(
+ (tab) =>
+ ({
+ label: capitalizeFirstLetter(tab),
+ key: tab,
+ content: tabToContent[tab],
+ link: params.kindTab ? urls.pipelineApp(kind, id, tab as PipelineAppTab) : undefined,
+                        } as LemonTab<PipelineAppTab>)
+ )}
/>
)
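`paramsToProps` keeps the id polymorphic on purpose: plugin configs use numeric ids while batch exports use UUID strings (see the batch export fixtures below), and the `/^\d+$/` test routes each to the right type. The id handling, distilled:

```ts
// Mirrors the id handling above: all-digit strings become numbers (plugin
// config ids); anything else stays a string (batch export UUIDs).
function parseAppId(id: string): number | string {
    const numericId = /^\d+$/.test(id) ? parseInt(id) : undefined
    return numericId !== undefined && !isNaN(numericId) ? numericId : id
}

parseAppId('42') // => 42
parseAppId('018cf79f-a9e5-0001-cd6a-edc4886d939d') // => the UUID string
```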
diff --git a/frontend/src/scenes/pipeline/PipelineAppConfiguration.tsx b/frontend/src/scenes/pipeline/PipelineAppConfiguration.tsx
new file mode 100644
index 0000000000000..830390ee2fc6f
--- /dev/null
+++ b/frontend/src/scenes/pipeline/PipelineAppConfiguration.tsx
@@ -0,0 +1,127 @@
+import { LemonSkeleton, LemonWidget, Tooltip } from '@posthog/lemon-ui'
+import { useActions, useValues } from 'kea'
+import { Form } from 'kea-forms'
+import { Field } from 'lib/forms/Field'
+import { IconLock } from 'lib/lemon-ui/icons'
+import { LemonMarkdown } from 'lib/lemon-ui/LemonMarkdown'
+import React, { useEffect, useState } from 'react'
+import {
+ defaultConfigForPlugin,
+ determineInvisibleFields,
+ determineRequiredFields,
+ getConfigSchemaArray,
+ isValidField,
+} from 'scenes/pipeline/configUtils'
+import { PluginField } from 'scenes/plugins/edit/PluginField'
+
+import { pipelineAppLogic } from './pipelineAppLogic'
+
+export function PipelineAppConfiguration(): JSX.Element {
+ const { appBackend } = useValues(pipelineAppLogic)
+
+ if (appBackend === 'plugin') {
+ return (
+
+
+
+ )
+ }
+
+ return Unsupported app type
+}
+
+function WebhookAppConfiguration(): JSX.Element {
+ const { maybePlugin, maybePluginConfig, configuration, kind } = useValues(pipelineAppLogic)
+ const { resetConfiguration, setConfigurationValues } = useActions(pipelineAppLogic)
+
+    const [invisibleFields, setInvisibleFields] = useState<string[]>([])
+    const [requiredFields, setRequiredFields] = useState<string[]>([])
+
+ const updateInvisibleAndRequiredFields = (): void => {
+ setInvisibleFields(
+ maybePlugin ? determineInvisibleFields((fieldName) => configuration[fieldName], maybePlugin) : []
+ )
+ setRequiredFields(
+ maybePlugin ? determineRequiredFields((fieldName) => configuration[fieldName], maybePlugin) : []
+ )
+ }
+
+ useEffect(() => {
+ if (maybePlugin && maybePluginConfig) {
+ setConfigurationValues({
+ ...(maybePluginConfig.config || defaultConfigForPlugin(maybePlugin)),
+ __enabled: maybePluginConfig.enabled,
+ })
+ } else {
+ resetConfiguration()
+ }
+ updateInvisibleAndRequiredFields()
+ }, [maybePlugin?.id, maybePlugin?.config_schema])
+
+ if (!maybePlugin) {
+            // This is only rendered while the plugin is loading; once we know it doesn't exist, the whole scene renders NotFound instead
+ return (
+
+ {Array(2)
+ .fill(null)
+ .map((_, index) => (
+
+
+
+
+ ))}
+
+ )
+ }
+
+ const configSchemaArray = getConfigSchemaArray(maybePlugin.config_schema)
+
+ if (configSchemaArray.length === 0) {
+ return This {kind} isn't configurable.
+ }
+
+ const fields = configSchemaArray.map((fieldConfig, index) => (
+
+ {fieldConfig.key &&
+ fieldConfig.type &&
+ isValidField(fieldConfig) &&
+ !invisibleFields.includes(fieldConfig.key) ? (
+
+ {fieldConfig.secret && (
+
+
+
+ )}
+ {fieldConfig.markdown && {fieldConfig.markdown} }
+ {fieldConfig.name || fieldConfig.key}
+ >
+ }
+ help={fieldConfig.hint && {fieldConfig.hint} }
+ name={fieldConfig.key}
+ showOptional={!fieldConfig.required && !requiredFields.includes(fieldConfig.key)}
+ >
+
+
+ ) : (
+ <>
+ {fieldConfig.type ? (
+
+ Invalid config field {fieldConfig.name || fieldConfig.key} .
+
+ ) : null}
+ >
+ )}
+
+ ))
+
+ return (
+
+ {fields}
+
+ )
+}
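`determineInvisibleFields` and `determineRequiredFields` take a getter over the current form values because one field's visibility can depend on another field's value. A hedged sketch of that getter-based pattern (the `visible_if` schema field is hypothetical; the real config schema format may differ):

```ts
// Hypothetical schema entry: `visible_if` names another field that must be
// truthy for this one to render.
interface ConfigField {
    key: string
    visible_if?: string
}

function invisibleFields(getValue: (fieldName: string) => unknown, schema: ConfigField[]): string[] {
    return schema.filter((field) => field.visible_if && !getValue(field.visible_if)).map((field) => field.key)
}

// Usage mirrors the component above: invisibleFields((name) => configuration[name], schema)
```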
diff --git a/frontend/src/scenes/pipeline/PipelineAppLogs.tsx b/frontend/src/scenes/pipeline/PipelineAppLogs.tsx
new file mode 100644
index 0000000000000..a8149694ae204
--- /dev/null
+++ b/frontend/src/scenes/pipeline/PipelineAppLogs.tsx
@@ -0,0 +1,78 @@
+import { LemonButton, LemonCheckbox, LemonInput, LemonTable } from '@posthog/lemon-ui'
+import { useActions, useValues } from 'kea'
+import { LOGS_PORTION_LIMIT } from 'lib/constants'
+import { pluralize } from 'lib/utils'
+
+import { PipelineAppLogicProps } from './pipelineAppLogic'
+import { PipelineAppLogLevel, pipelineAppLogsLogic } from './pipelineAppLogsLogic'
+
+export function PipelineAppLogs({ id, kind }: PipelineAppLogicProps): JSX.Element {
+ const logic = pipelineAppLogsLogic({ id, kind })
+
+ const { logs, logsLoading, backgroundLogs, columns, isThereMoreToLoad, selectedLogLevels } = useValues(logic)
+ const { revealBackground, loadMoreLogs, setSelectedLogLevels, setSearchTerm } = useActions(logic)
+
+ return (
+
+
+
+ Show logs of level:
+ {Object.values(PipelineAppLogLevel).map((level) => {
+ return (
+ {
+ const newLogLevels = checked
+ ? [...selectedLogLevels, level]
+ : selectedLogLevels.filter((t) => t != level)
+ setSelectedLogLevels(newLogLevels)
+ }}
+ />
+ )
+ })}
+
+
+ {backgroundLogs.length
+ ? `Load ${pluralize(backgroundLogs.length, 'newer entry', 'newer entries')}`
+ : 'No new entries'}
+
+
+
+ {!!logs.length && (
+
+ {isThereMoreToLoad ? `Load up to ${LOGS_PORTION_LIMIT} older entries` : 'No older entries'}
+
+ )}
+
+ )
+}
diff --git a/frontend/src/scenes/pipeline/Transformations.tsx b/frontend/src/scenes/pipeline/Transformations.tsx
index ed6a8a123857e..ce1cdcc0b5eec 100644
--- a/frontend/src/scenes/pipeline/Transformations.tsx
+++ b/frontend/src/scenes/pipeline/Transformations.tsx
@@ -24,7 +24,13 @@ import { deleteWithUndo } from 'lib/utils/deleteWithUndo'
import { PluginImage } from 'scenes/plugins/plugin/PluginImage'
import { urls } from 'scenes/urls'
-import { PipelineAppTabs, PipelineTabs, PluginConfigTypeNew, PluginConfigWithPluginInfoNew, ProductKey } from '~/types'
+import {
+ PipelineAppKind,
+ PipelineAppTab,
+ PluginConfigTypeNew,
+ PluginConfigWithPluginInfoNew,
+ ProductKey,
+} from '~/types'
import { NewButton } from './NewButton'
import { pipelineTransformationsLogic } from './transformationsLogic'
@@ -33,7 +39,7 @@ import { RenderApp } from './utils'
export function Transformations(): JSX.Element {
const { featureFlags } = useValues(featureFlagLogic)
if (!featureFlags[FEATURE_FLAGS.PIPELINE_UI]) {
-        return <></>
+        return <p>Pipeline 3000 not available yet</p>
}
const {
loading,
@@ -56,7 +62,7 @@ export function Transformations(): JSX.Element {
productKey={ProductKey.PIPELINE_TRANSFORMATIONS}
description="Pipeline transformations allow you to enrich your data with additional information, such as geolocation."
docsURL="https://posthog.com/docs/cdp"
-                    actionElementOverride={<NewButton tab={PipelineTabs.Transformations} />}
+                    actionElementOverride={<NewButton kind={PipelineAppKind.Transformation} />}
isEmpty={true}
/>
)}
@@ -109,8 +115,9 @@ export function Transformations(): JSX.Element {
{pluginConfig.name}
@@ -190,8 +197,9 @@ export function Transformations(): JSX.Element {
)}
View app metrics
@@ -234,7 +250,7 @@ export function Transformations(): JSX.Element {
callback: loadPluginConfigs,
})
}}
- id={`app-reorder`}
+ id={`app-delete`}
disabledReason={
canConfigurePlugins
? undefined
diff --git a/frontend/src/scenes/pipeline/__mocks__/batchExportLogs.json b/frontend/src/scenes/pipeline/__mocks__/batchExportLogs.json
new file mode 100644
index 0000000000000..8bd35cab9d8cc
--- /dev/null
+++ b/frontend/src/scenes/pipeline/__mocks__/batchExportLogs.json
@@ -0,0 +1,39 @@
+{
+ "count": 4,
+ "next": null,
+ "previous": null,
+ "results": [
+ {
+ "team_id": 2,
+ "batch_export_id": "018cf79f-a9e5-0001-cd6a-edc4886d939d",
+ "run_id": "62acf018-c2e4-47a8-88ff-20839ca84816",
+ "timestamp": "2024-01-11T09:09:07.849132Z",
+ "level": "INFO",
+ "message": "Successfully finished exporting batch 2024-01-11 08:00:00+00:00 - 2024-01-11 09:00:00+00:00"
+ },
+ {
+ "team_id": 2,
+ "batch_export_id": "018cf79f-a9e5-0001-cd6a-edc4886d939d",
+ "run_id": "62acf018-c2e4-47a8-88ff-20839ca84816",
+ "timestamp": "2024-01-11T09:03:23.257635Z",
+ "level": "INFO",
+ "message": "BatchExporting 109171 rows"
+ },
+ {
+ "team_id": 2,
+ "batch_export_id": "018cf79f-a9e5-0001-cd6a-edc4886d939d",
+ "run_id": "62acf018-c2e4-47a8-88ff-20839ca84816",
+ "timestamp": "2024-01-11T09:03:23.053067Z",
+ "level": "INFO",
+ "message": "Exporting batch 2024-01-11T08:00:00+00:00 - 2024-01-11T09:00:00+00:00"
+ },
+ {
+ "team_id": 2,
+ "batch_export_id": "018cf79f-a9e5-0001-cd6a-edc4886d939d",
+ "run_id": "62acf018-c2e4-47a8-88ff-20839ca84816",
+ "timestamp": "2024-01-11T09:03:23.011577Z",
+ "level": "INFO",
+ "message": "Creating batch export for range 2024-01-11T08:00:00+00:00 - 2024-01-11T09:00:00+00:00"
+ }
+ ]
+}
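For reference, one fixture entry as a TypeScript interface (the `level` union is an assumption; only `INFO` appears in this mock):

```ts
// Shape of a batchExportLogs.json entry.
interface BatchExportLogEntry {
    team_id: number
    batch_export_id: string
    run_id: string
    timestamp: string // ISO 8601
    level: 'DEBUG' | 'INFO' | 'WARNING' | 'ERROR' // assumption: only INFO is in the fixture
    message: string
}
```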
diff --git a/frontend/src/scenes/pipeline/__mocks__/batchExports.json b/frontend/src/scenes/pipeline/__mocks__/batchExports.json
new file mode 100644
index 0000000000000..1e1be726c89b2
--- /dev/null
+++ b/frontend/src/scenes/pipeline/__mocks__/batchExports.json
@@ -0,0 +1,622 @@
+{
+ "count": 4,
+ "next": null,
+ "previous": null,
+ "results": [
+ {
+ "id": "018cf79f-a9e5-0001-cd6a-edc4886d939d",
+ "team_id": 55,
+ "name": "BigQuery Export",
+ "destination": {
+ "type": "BigQuery",
+ "config": {
+ "table_id": "events",
+ "dataset_id": "test_team",
+ "project_id": "test_project",
+ "exclude_events": ["$feature_flag_called", "$autocapture"]
+ }
+ },
+ "interval": "hour",
+ "paused": false,
+ "created_at": "2023-10-31T14:50:57.592062Z",
+ "last_updated_at": "2023-10-31T14:50:57.592077Z",
+ "last_paused_at": null,
+ "start_at": null,
+ "end_at": null,
+ "latest_runs": [
+ {
+ "id": "018cf3ad-c044-0000-f73d-efedbcef15d4",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T13:00:00Z",
+ "data_interval_end": "2024-01-10T14:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T14:01:39.652690Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T14:01:39.652706Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf375-b884-0000-b4dc-7742ec74ca6e",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T12:00:00Z",
+ "data_interval_end": "2024-01-10T13:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T13:00:27.652569Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T13:00:27.652585Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf340-602d-0000-b975-14c6bea23e0f",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T11:00:00Z",
+ "data_interval_end": "2024-01-10T12:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T12:02:11.630354Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T12:02:11.630370Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf307-727b-0000-ff2f-594684196e95",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T10:00:00Z",
+ "data_interval_end": "2024-01-10T11:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T11:00:00.764328Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T11:00:00.764344Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf2d0-f082-0000-1691-65d5fcdd58c2",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T09:00:00Z",
+ "data_interval_end": "2024-01-10T10:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T10:00:28.546676Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T10:00:28.546691Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf29a-2b56-0000-be8c-2a9d082ddf53",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T08:00:00Z",
+ "data_interval_end": "2024-01-10T09:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T09:00:39.127174Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T09:00:39.127190Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf265-1fca-0000-3d80-0b28a1a16d3f",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T07:00:00Z",
+ "data_interval_end": "2024-01-10T08:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T08:02:42.762603Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T08:02:42.762619Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf22c-3e2a-0000-29ce-9e0411234222",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T06:00:00Z",
+ "data_interval_end": "2024-01-10T07:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T07:00:34.986990Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T07:00:34.987007Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf1f7-83be-0000-a42a-5bc47b873458",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T05:00:00Z",
+ "data_interval_end": "2024-01-10T06:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T06:02:59.390855Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T06:02:59.390871Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ },
+ {
+ "id": "018cf1c0-0e82-0000-1b08-3ac41a034690",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T04:00:00Z",
+ "data_interval_end": "2024-01-10T05:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T05:02:24.898450Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T05:02:24.898466Z",
+ "batch_export": "018b8637-5eb4-0001-2a6e-f2c322dfc79b"
+ }
+ ]
+ },
+ {
+ "id": "018a709a-9d6d-0001-ed23-4e1f795a8528",
+ "team_id": 2,
+ "name": "Test BigQuery Export",
+ "destination": {
+ "type": "BigQuery",
+ "config": {
+ "table_id": "smoke-test-table",
+ "dataset_id": "BatchExports",
+ "project_id": "test_project",
+ "exclude_events": [],
+ "json_config_file": [{}]
+ }
+ },
+ "interval": "hour",
+ "paused": true,
+ "created_at": "2023-09-07T17:04:55.757217Z",
+ "last_updated_at": "2023-11-21T09:52:42.606792Z",
+ "last_paused_at": "2023-11-21T09:52:42.606538Z",
+ "start_at": null,
+ "end_at": null,
+ "latest_runs": [
+ {
+ "id": "018bf14b-6023-0000-830a-4f2b9e66b294",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T08:00:00Z",
+ "data_interval_end": "2023-11-21T09:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:10.787820Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:10.787837Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-6016-0000-de89-a1133e12b77b",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T07:00:00Z",
+ "data_interval_end": "2023-11-21T08:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:10.774699Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:10.774715Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-4c2d-0000-b9bf-747735294f6e",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T07:00:00Z",
+ "data_interval_end": "2023-11-21T08:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:05.685216Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:05.685233Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-4c1e-0000-d52c-4ac7c854fbec",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T06:00:00Z",
+ "data_interval_end": "2023-11-21T07:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:05.670589Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:05.670606Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-382a-0000-7444-9f3db4bd5ffe",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T06:00:00Z",
+ "data_interval_end": "2023-11-21T07:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:00.555292Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:00.555308Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-381e-0000-f05c-bbe2c65baa2b",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T05:00:00Z",
+ "data_interval_end": "2023-11-21T06:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:52:00.554047Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:52:00.554066Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-2421-0000-3583-0ba272f9dc42",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T05:00:00Z",
+ "data_interval_end": "2023-11-21T06:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:51:55.441729Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:51:55.441748Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-240b-0000-5e6a-93cb86304878",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T04:00:00Z",
+ "data_interval_end": "2023-11-21T05:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:51:55.410632Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:51:55.410650Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-1008-0000-b63a-994105e63314",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T04:00:00Z",
+ "data_interval_end": "2023-11-21T05:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:51:50.288667Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:51:50.288686Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ },
+ {
+ "id": "018bf14b-0ff8-0000-69c1-87ac857ea7b7",
+ "status": "Failed",
+ "records_completed": null,
+ "latest_error": "NotFound: 404 POST https://bigquery.googleapis.com/bigquery/v2/projects/posthog-301601/datasets/BatchExports/tables?prettyPrint=false: Not found: Dataset posthog-301601:BatchExports",
+ "data_interval_start": "2023-11-21T03:00:00Z",
+ "data_interval_end": "2023-11-21T04:00:00Z",
+ "cursor": null,
+ "created_at": "2023-11-21T09:51:50.265044Z",
+ "finished_at": null,
+ "last_updated_at": "2023-11-21T09:51:50.265060Z",
+ "batch_export": "018a709a-9d6d-0001-ed23-4e1f795a8528"
+ }
+ ]
+ },
+ {
+ "id": "018a6fab-2c21-0001-d451-724c2995e2c0",
+ "team_id": 2,
+ "name": "S3 Export",
+ "destination": {
+ "type": "S3",
+ "config": {
+ "prefix": "us-cloud/",
+ "region": "us-east-1",
+ "bucket_name": "test_bucket",
+ "compression": "gzip",
+ "exclude_events": [""]
+ }
+ },
+ "interval": "hour",
+ "paused": false,
+ "created_at": "2023-09-07T12:43:23.557247Z",
+ "last_updated_at": "2023-09-07T12:43:23.557265Z",
+ "last_paused_at": null,
+ "start_at": null,
+ "end_at": null,
+ "latest_runs": [
+ {
+ "id": "018cf3ad-2672-0000-d2da-604bbff55766",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T13:00:00Z",
+ "data_interval_end": "2024-01-10T14:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T14:01:00.274767Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T14:01:00.274784Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf376-4661-0000-1b69-00968cc5b8e5",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T12:00:00Z",
+ "data_interval_end": "2024-01-10T13:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T13:01:03.969405Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T13:01:03.969421Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf342-3938-0000-7b34-c9a7c39a532a",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T11:00:00Z",
+ "data_interval_end": "2024-01-10T12:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T12:04:12.728709Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T12:04:12.728726Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf30b-e5a0-0000-70cd-dbd1ab98da03",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T10:00:00Z",
+ "data_interval_end": "2024-01-10T11:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T11:04:52.385134Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T11:04:52.385150Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf2d3-2f8b-0000-3565-905bcd3da0e0",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T09:00:00Z",
+ "data_interval_end": "2024-01-10T10:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T10:02:55.755873Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T10:02:55.755889Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf29a-bfeb-0000-2f71-391f3b1954ed",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T08:00:00Z",
+ "data_interval_end": "2024-01-10T09:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T09:01:17.163862Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T09:01:17.163878Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf265-51a1-0000-1dde-0996f50c3000",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T07:00:00Z",
+ "data_interval_end": "2024-01-10T08:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T08:02:55.521723Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T08:02:55.521737Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf22c-b2e2-0000-ba4a-a607074858c6",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T06:00:00Z",
+ "data_interval_end": "2024-01-10T07:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T07:01:04.866855Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T07:01:04.866872Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf1f7-e34a-0000-3982-89c0e9813711",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T05:00:00Z",
+ "data_interval_end": "2024-01-10T06:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T06:03:23.851061Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T06:03:23.851076Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ },
+ {
+ "id": "018cf1c2-153e-0000-4b0f-19daee02fc74",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2024-01-10T04:00:00Z",
+ "data_interval_end": "2024-01-10T05:00:00Z",
+ "cursor": null,
+ "created_at": "2024-01-10T05:04:37.694827Z",
+ "finished_at": null,
+ "last_updated_at": "2024-01-10T05:04:37.694843Z",
+ "batch_export": "018a6fab-2c21-0001-d451-724c2995e2c0"
+ }
+ ]
+ },
+ {
+ "id": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b",
+ "team_id": 2,
+ "name": "Test S3 Export",
+ "destination": {
+ "type": "S3",
+ "config": {
+ "prefix": "posthog-events/",
+ "region": "us-east-1",
+ "bucket_name": "test-bucket",
+ "compression": "gzip",
+ "exclude_events": ["$feature_flag_called"]
+ }
+ },
+ "interval": "hour",
+ "paused": true,
+ "created_at": "2023-09-04T09:32:45.980545Z",
+ "last_updated_at": "2023-09-04T12:11:16.548676Z",
+ "last_paused_at": "2023-09-04T12:11:16.548381Z",
+ "start_at": null,
+ "end_at": null,
+ "latest_runs": [
+ {
+ "id": "018a6b1c-3903-0000-f65d-ff750db1225d",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T21:00:00Z",
+ "data_interval_end": "2023-09-14T22:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:46.339621Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:46.339637Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-3719-0000-dc37-80e64d5f0746",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T20:00:00Z",
+ "data_interval_end": "2023-09-14T21:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:45.849725Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:45.849741Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-35d8-0000-7901-7ba7139c01d2",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T19:00:00Z",
+ "data_interval_end": "2023-09-14T20:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:45.529060Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:45.529079Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-348a-0000-636a-71a4c2623945",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T18:00:00Z",
+ "data_interval_end": "2023-09-14T19:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:45.194567Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:45.194584Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-3379-0000-7999-ef2f5fcb66d9",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T17:00:00Z",
+ "data_interval_end": "2023-09-14T18:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:44.921786Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:44.921803Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-3252-0000-f32a-e201e7455025",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T16:00:00Z",
+ "data_interval_end": "2023-09-14T17:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:44.626899Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:44.626915Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-30db-0000-46ac-38b01e4063ed",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T15:00:00Z",
+ "data_interval_end": "2023-09-14T16:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:44.251992Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:44.252009Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-2f15-0000-3328-b5dcea7052dd",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T14:00:00Z",
+ "data_interval_end": "2023-09-14T15:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:43.798210Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:43.798226Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-2d92-0000-45a2-76e6584fa4d0",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T13:00:00Z",
+ "data_interval_end": "2023-09-14T14:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:43.411157Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:43.411174Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ },
+ {
+ "id": "018a6b1c-2c52-0000-e888-dda84f4de7e0",
+ "status": "Completed",
+ "records_completed": null,
+ "latest_error": null,
+ "data_interval_start": "2023-09-14T12:00:00Z",
+ "data_interval_end": "2023-09-14T13:00:00Z",
+ "cursor": null,
+ "created_at": "2023-09-06T15:28:43.090535Z",
+ "finished_at": null,
+ "last_updated_at": "2023-09-06T15:28:43.090552Z",
+ "batch_export": "018a5f89-91b0-0001-9d3e-e1efd2b0fe2b"
+ }
+ ]
+ }
+ ]
+}
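A minimal sketch of how a fixture like this is usually consumed in a story via `mswDecorator`; the story title and handler path below are assumptions for illustration, not part of this PR:

```ts
import { Meta } from '@storybook/react'

import { mswDecorator } from '~/mocks/browser'

import batchExports from './__mocks__/batchExports.json'

export default {
    title: 'Scenes-App/Pipeline', // illustrative title
    decorators: [
        mswDecorator({
            get: {
                // Serve the fixture for the batch exports list endpoint.
                '/api/projects/:team_id/batch_exports': batchExports,
            },
        }),
    ],
} as Meta
```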
diff --git a/frontend/src/scenes/pipeline/__mocks__/transformationPluginConfigs.json b/frontend/src/scenes/pipeline/__mocks__/pluginConfigs.json
similarity index 100%
rename from frontend/src/scenes/pipeline/__mocks__/transformationPluginConfigs.json
rename to frontend/src/scenes/pipeline/__mocks__/pluginConfigs.json
diff --git a/frontend/src/scenes/pipeline/appsManagementLogic.tsx b/frontend/src/scenes/pipeline/appsManagementLogic.tsx
index 5598f95dc8ce0..9d5a54f3e68c5 100644
--- a/frontend/src/scenes/pipeline/appsManagementLogic.tsx
+++ b/frontend/src/scenes/pipeline/appsManagementLogic.tsx
@@ -1,4 +1,4 @@
-import { actions, afterMount, connect, kea, path, reducers, selectors } from 'kea'
+import { actions, afterMount, connect, kea, listeners, path, reducers, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'
import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'
@@ -56,6 +56,12 @@ function capturePluginEvent(event: string, plugin: PluginType, type: PluginInsta
plugin_installation_type: type,
})
}
+export interface PluginUpdateStatusType {
+ latest_tag: string
+ upToDate: boolean
+ updated: boolean
+ error: string | null
+}
export const appsManagementLogic = kea<appsManagementLogicType>([
path(['scenes', 'pipeline', 'appsManagementLogic']),
@@ -73,6 +79,10 @@ export const appsManagementLogic = kea<appsManagementLogicType>([
installSourcePlugin: (name: string) => ({ name }),
installLocalPlugin: (path: string) => ({ path }),
        patchPlugin: (id: number, pluginChanges: Partial<PluginType> = {}) => ({ id, pluginChanges }),
+ updatePlugin: (id: number) => ({ id }),
+ checkForUpdates: true,
+ checkedForUpdates: true,
+ setPluginLatestTag: (id: number, latestTag: string) => ({ id, latestTag }),
}),
loaders(({ values }) => ({
plugins: [
@@ -124,9 +134,25 @@ export const appsManagementLogic = kea<appsManagementLogicType>([
return rest
},
patchPlugin: async ({ id, pluginChanges }) => {
+                if (!values.canGloballyManagePlugins) {
+                    lemonToast.error("You don't have permission to update apps.")
+                    return values.plugins
+                }
const response = await api.update(`api/organizations/@current/plugins/${id}`, pluginChanges)
return { ...values.plugins, [id]: response }
},
+ setPluginLatestTag: async ({ id, latestTag }) => {
+ return { ...values.plugins, [id]: { ...values.plugins[id], latest_tag: latestTag } }
+ },
+ updatePlugin: async ({ id }) => {
+                if (!values.canGloballyManagePlugins) {
+                    lemonToast.error("You don't have permission to update apps.")
+                    return values.plugins
+                }
+                // TODO: handle the case where the update request fails
+ const response = await api.create(`api/organizations/@current/plugins/${id}/upgrade`)
+ capturePluginEvent(`plugin updated`, values.plugins[id], values.plugins[id].plugin_type)
+ lemonToast.success(`Plugin ${response.name} updated!`)
+ return { ...values.plugins, [id]: response }
+ },
},
],
unusedPlugins: [
@@ -177,6 +203,13 @@ export const appsManagementLogic = kea<appsManagementLogicType>([
installPluginSuccess: () => SourcePluginKind.FilterEvent,
},
],
+ checkingForUpdates: [
+ false,
+ {
+ checkForUpdates: () => true,
+ checkedForUpdates: () => false,
+ },
+ ],
}),
selectors({
canInstallPlugins: [(s) => [s.user], (user) => canInstallPlugins(user?.organization)],
@@ -206,9 +239,41 @@ export const appsManagementLogic = kea<appsManagementLogicType>([
)
},
],
+ updatablePlugins: [
+ (s) => [s.plugins],
+ (plugins) =>
+ Object.values(plugins).filter(
+ (plugin) => plugin.plugin_type !== PluginInstallationType.Source && !plugin.url?.startsWith('file:')
+ ),
+ ],
+ pluginsNeedingUpdates: [
+ (s) => [s.updatablePlugins],
+ (plugins) => {
+ return plugins.filter((plugin) => plugin.latest_tag && plugin.tag !== plugin.latest_tag)
+ },
+ ],
}),
+ listeners(({ actions, values }) => ({
+ checkForUpdates: async () => {
+ await Promise.all(
+ values.updatablePlugins.map(async (plugin) => {
+ try {
+ const updates = await api.get(
+ `api/organizations/@current/plugins/${plugin.id}/check_for_updates`
+ )
+ actions.setPluginLatestTag(plugin.id, updates.plugin.latest_tag)
+ } catch (e) {
+ lemonToast.error(`Error checking for updates for ${plugin.name}: ${JSON.stringify(e)}`)
+ }
+ })
+ )
+
+ actions.checkedForUpdates()
+ },
+ })),
afterMount(({ actions }) => {
actions.loadPlugins()
actions.loadUnusedPlugins()
+ actions.checkForUpdates()
}),
])
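The `checkForUpdates` flow above reduces to comparing the installed `tag` with the `latest_tag` fetched per plugin; a self-contained sketch of the `pluginsNeedingUpdates` predicate (types and helper name are illustrative):

```ts
// Illustrative restatement of the pluginsNeedingUpdates filter.
interface PluginVersionInfo {
    tag?: string | null // currently installed commit SHA or tag
    latest_tag?: string | null // filled in by setPluginLatestTag after check_for_updates
}

function needsUpdate(plugin: PluginVersionInfo): boolean {
    // Only report an update once the check has completed (latest_tag known)
    // and the installed tag differs from it.
    return !!plugin.latest_tag && plugin.tag !== plugin.latest_tag
}

needsUpdate({ tag: 'abc123', latest_tag: 'def456' }) // true: behind the repo
needsUpdate({ tag: 'def456', latest_tag: 'def456' }) // false: up to date
needsUpdate({ tag: 'abc123', latest_tag: null }) // false: check not done yet
```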
diff --git a/frontend/src/scenes/plugins/utils.ts b/frontend/src/scenes/pipeline/configUtils.ts
similarity index 55%
rename from frontend/src/scenes/plugins/utils.ts
rename to frontend/src/scenes/pipeline/configUtils.ts
index 500916dbcea45..0b03c4d567d75 100644
--- a/frontend/src/scenes/plugins/utils.ts
+++ b/frontend/src/scenes/pipeline/configUtils.ts
@@ -1,7 +1,8 @@
-import { PluginConfigSchema } from '@posthog/plugin-scaffold'
-import type { FormInstance } from 'antd/lib/form/hooks/useForm.d'
+import { PluginConfigChoice, PluginConfigSchema } from '@posthog/plugin-scaffold'
import { PluginTypeWithConfig } from 'scenes/plugins/types'
+import { PluginType } from '~/types'
+
// Keep this in sync with: posthog/api/plugin.py
export const SECRET_FIELD_VALUE = '**************** POSTHOG SECRET FIELD ****************'
@@ -33,7 +34,7 @@ export function getConfigSchemaObject(
}
}
-export function defaultConfigForPlugin(plugin: PluginTypeWithConfig): Record<string, any> {
+export function defaultConfigForPlugin(plugin: PluginType): Record<string, any> {
     const config: Record<string, any> = {}
for (const field of getConfigSchemaArray(plugin.config_schema)) {
if (field.key && typeof field.default !== 'undefined') {
@@ -71,14 +72,61 @@ export function getPluginConfigFormData(
return formData
}
-export const doFieldRequirementsMatch = (
- form: FormInstance,
+const doFieldRequirementsMatch = (
+ getFieldValue: (fieldName: string) => any,
targetFieldName: string | undefined,
targetFieldValue: string | undefined
): boolean => {
- const formActualValue = form.getFieldValue(targetFieldName || '') || ''
+ const formActualValue = getFieldValue(targetFieldName || '') || ''
const targetAnyValue = typeof targetFieldValue === 'undefined'
const formValueSet = !!formActualValue
return (targetAnyValue && formValueSet) || targetFieldValue === formActualValue
}
+
+export const determineInvisibleFields = (getFieldValue: (fieldName: string) => any, plugin: PluginType): string[] => {
+ const fieldsToSetAsInvisible = []
+ for (const field of Object.values(getConfigSchemaArray(plugin.config_schema || {}))) {
+ if (!field.visible_if || !field.key) {
+ continue
+ }
+ const shouldBeVisible = field.visible_if.every(
+            ([targetFieldName, targetFieldValue]: Array<string | undefined>) =>
+ doFieldRequirementsMatch(getFieldValue, targetFieldName, targetFieldValue)
+ )
+
+ if (!shouldBeVisible) {
+ fieldsToSetAsInvisible.push(field.key)
+ }
+ }
+ return fieldsToSetAsInvisible
+}
+
+export const determineRequiredFields = (getFieldValue: (fieldName: string) => any, plugin: PluginType): string[] => {
+ const fieldsToSetAsRequired = []
+ for (const field of Object.values(getConfigSchemaArray(plugin.config_schema || {}))) {
+ if (!field.required_if || !Array.isArray(field.required_if) || !field.key) {
+ continue
+ }
+ const shouldBeRequired = field.required_if.every(
+            ([targetFieldName, targetFieldValue]: Array<string | undefined>) =>
+ doFieldRequirementsMatch(getFieldValue, targetFieldName, targetFieldValue)
+ )
+ if (shouldBeRequired) {
+ fieldsToSetAsRequired.push(field.key)
+ }
+ }
+ return fieldsToSetAsRequired
+}
+
+export const isValidChoiceConfig = (fieldConfig: PluginConfigChoice): boolean => {
+ return (
+ Array.isArray(fieldConfig.choices) &&
+ !!fieldConfig.choices.length &&
+ !fieldConfig.choices.find((c) => typeof c !== 'string') &&
+ !fieldConfig.secret
+ )
+}
+
+export const isValidField = (fieldConfig: PluginConfigSchema): boolean =>
+ fieldConfig.type !== 'choice' || isValidChoiceConfig(fieldConfig)
diff --git a/frontend/src/scenes/pipeline/destinationsLogic.tsx b/frontend/src/scenes/pipeline/destinationsLogic.tsx
index f596823a79106..8a4c3d3494a8f 100644
--- a/frontend/src/scenes/pipeline/destinationsLogic.tsx
+++ b/frontend/src/scenes/pipeline/destinationsLogic.tsx
@@ -1,14 +1,63 @@
-import { actions, afterMount, connect, kea, path, selectors } from 'kea'
+import { lemonToast } from '@posthog/lemon-ui'
+import { actions, afterMount, connect, kea, listeners, path, selectors } from 'kea'
import { loaders } from 'kea-loaders'
import api from 'lib/api'
import { canConfigurePlugins } from 'scenes/plugins/access'
import { teamLogic } from 'scenes/teamLogic'
+import { urls } from 'scenes/urls'
import { userLogic } from 'scenes/userLogic'
-import { PluginConfigTypeNew, PluginConfigWithPluginInfoNew, PluginType, ProductKey } from '~/types'
+import {
+ BatchExportConfiguration,
+ PipelineAppKind,
+ PipelineAppTab,
+ PluginConfigTypeNew,
+ PluginConfigWithPluginInfoNew,
+ PluginType,
+ ProductKey,
+} from '~/types'
import type { pipelineDestinationsLogicType } from './destinationsLogicType'
-import { capturePluginEvent } from './utils'
+import { captureBatchExportEvent, capturePluginEvent } from './utils'
+
+interface WebhookSuccessRate {
+ '24h': number | null
+ '7d': number | null
+}
+interface BatchExportSuccessRate {
+ '24h': [successes: number, failures: number]
+ '7d': [successes: number, failures: number]
+}
+
+interface DestinationTypeBase {
+ name: string
+ description?: string
+ enabled: boolean
+ config_url: string
+ metrics_url: string
+ logs_url: string
+ updated_at: string
+ frequency: 'realtime' | BatchExportConfiguration['interval']
+}
+export enum PipelineAppBackend {
+ BatchExport = 'batch_export',
+ Plugin = 'plugin',
+}
+
+export interface BatchExportDestination extends DestinationTypeBase {
+ backend: PipelineAppBackend.BatchExport
+ id: string
+ success_rates: BatchExportSuccessRate
+ app_source_code_url?: never
+}
+export interface WebhookDestination extends DestinationTypeBase {
+ backend: PipelineAppBackend.Plugin
+ id: number
+ plugin: PluginType
+ app_source_code_url?: string
+ success_rates: WebhookSuccessRate
+}
+export type DestinationType = BatchExportDestination | WebhookDestination
export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
path(['scenes', 'pipeline', 'destinationsLogic']),
@@ -16,7 +65,7 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
values: [teamLogic, ['currentTeamId'], userLogic, ['user']],
}),
actions({
- loadPluginConfigs: true,
+ toggleEnabled: (destination: DestinationType, enabled: boolean) => ({ destination, enabled }),
}),
loaders(({ values }) => ({
plugins: [
@@ -40,7 +89,7 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
            loadPluginConfigs: async () => {
                const pluginConfigs: Record<number, PluginConfigTypeNew> = {}
const results = await api.loadPaginatedResults(
- `api/projects/${values.currentTeamId}/pipeline_destinations_configs`
+ `api/projects/${values.currentTeamId}/pipeline_destination_configs`
)
for (const pluginConfig of results) {
@@ -54,26 +103,51 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
}
return pluginConfigs
},
- toggleEnabled: async ({ id, enabled }) => {
+ toggleEnabledWebhook: async ({ destination, enabled }) => {
+            if (destination.backend === PipelineAppBackend.BatchExport) {
+ return values.pluginConfigs
+ }
if (!values.canConfigurePlugins) {
return values.pluginConfigs
}
const { pluginConfigs, plugins } = values
- const pluginConfig = pluginConfigs[id]
+ const pluginConfig = pluginConfigs[destination.id]
const plugin = plugins[pluginConfig.plugin]
capturePluginEvent(`plugin ${enabled ? 'enabled' : 'disabled'}`, plugin, pluginConfig)
- const response = await api.update(`api/plugin_config/${id}`, {
+ const response = await api.update(`api/plugin_config/${destination.id}`, {
enabled,
})
- return { ...pluginConfigs, [id]: response }
+ return { ...pluginConfigs, [destination.id]: response }
+ },
+ },
+ ],
+ batchExportConfigs: [
+        {} as Record<string, BatchExportConfiguration>,
+ {
+ loadBatchExports: async () => {
+ const results: BatchExportConfiguration[] = await api.loadPaginatedResults(
+ `api/projects/${values.currentTeamId}/batch_exports`
+ )
+ return Object.fromEntries(results.map((batchExport) => [batchExport.id, batchExport]))
+ },
+ toggleEnabledBatchExport: async ({ destination, enabled }) => {
+ const batchExport = values.batchExportConfigs[destination.id]
+                    if (enabled) {
+                        await api.batchExports.unpause(destination.id)
+                    } else {
+                        await api.batchExports.pause(destination.id)
+                    }
+ captureBatchExportEvent(`batch export ${enabled ? 'enabled' : 'disabled'}`, batchExport)
+ return { ...values.batchExportConfigs, [destination.id]: { ...batchExport, paused: !enabled } }
},
},
],
})),
selectors({
loading: [
- (s) => [s.pluginsLoading, s.pluginConfigsLoading],
- (pluginsLoading, pluginConfigsLoading) => pluginsLoading || pluginConfigsLoading,
+ (s) => [s.pluginsLoading, s.pluginConfigsLoading, s.batchExportConfigsLoading],
+ (pluginsLoading, pluginConfigsLoading, batchExportConfigsLoading) =>
+ pluginsLoading || pluginConfigsLoading || batchExportConfigsLoading,
],
enabledPluginConfigs: [
(s) => [s.pluginConfigs],
@@ -96,6 +170,55 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
return withPluginInfo
},
],
+ destinations: [
+ (s) => [s.pluginConfigs, s.plugins, s.batchExportConfigs],
+ (pluginConfigs, plugins, batchExportConfigs): DestinationType[] => {
+ const appDests = Object.values(pluginConfigs).map((pluginConfig) => ({
+ backend: PipelineAppBackend.Plugin,
+ frequency: 'realtime',
+ id: pluginConfig.id,
+ name: pluginConfig.name,
+ description: pluginConfig.description,
+ enabled: pluginConfig.enabled,
+ config_url: urls.pipelineApp(
+ PipelineAppKind.Destination,
+ pluginConfig.id,
+ PipelineAppTab.Configuration
+ ),
+ metrics_url: urls.pipelineApp(PipelineAppKind.Destination, pluginConfig.id, PipelineAppTab.Metrics),
+ logs_url: urls.pipelineApp(PipelineAppKind.Destination, pluginConfig.id, PipelineAppTab.Logs),
+ app_source_code_url: '',
+ plugin: plugins[pluginConfig.plugin],
+ success_rates: {
+ '24h': pluginConfig.delivery_rate_24h === undefined ? null : pluginConfig.delivery_rate_24h,
+ '7d': null, // TODO: start populating real data for this
+ },
+ updated_at: pluginConfig.updated_at,
+ }))
+ const batchDests = Object.values(batchExportConfigs).map((batchExport) => ({
+ backend: PipelineAppBackend.BatchExport,
+ frequency: batchExport.interval,
+ id: batchExport.id,
+ name: batchExport.name,
+ description: `${batchExport.destination.type} batch export`, // TODO: add to backend
+ enabled: !batchExport.paused,
+ config_url: urls.pipelineApp(
+ PipelineAppKind.Destination,
+ batchExport.id,
+ PipelineAppTab.Configuration
+ ),
+ metrics_url: urls.pipelineApp(PipelineAppKind.Destination, batchExport.id, PipelineAppTab.Metrics),
+ logs_url: urls.pipelineApp(PipelineAppKind.Destination, batchExport.id, PipelineAppTab.Logs),
+ success_rates: {
+ '24h': [5, 17],
+ '7d': [12, 100043],
+ },
+ updated_at: batchExport.created_at, // TODO: Add updated_at to batch exports in the backend
+ }))
+ const enabledFirst = [...appDests, ...batchDests].sort((a, b) => Number(b.enabled) - Number(a.enabled))
+ return enabledFirst
+ },
+ ],
        // This is currently an organization-level setting, but it might become user-level in the future;
        // it's better to add the permission checks everywhere now
canConfigurePlugins: [(s) => [s.user], (user) => canConfigurePlugins(user?.organization)],
@@ -106,8 +229,22 @@ export const pipelineDestinationsLogic = kea<pipelineDestinationsLogicType>([
},
],
}),
+ listeners(({ actions, values }) => ({
+ toggleEnabled: async ({ destination, enabled }) => {
+ if (!values.canConfigurePlugins) {
+ lemonToast.error("You don't have permission to enable or disable destinations")
+ return
+ }
+            if (destination.backend === PipelineAppBackend.Plugin) {
+ actions.toggleEnabledWebhook({ destination: destination, enabled: enabled })
+ } else {
+ actions.toggleEnabledBatchExport({ destination: destination, enabled: enabled })
+ }
+ },
+ })),
afterMount(({ actions }) => {
actions.loadPlugins()
actions.loadPluginConfigs()
+ actions.loadBatchExports()
}),
])
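`backend` is the discriminant of the new `DestinationType` union, which is what the `toggleEnabled` listener relies on when routing to the webhook or batch export loader; a narrowing sketch (the helper is illustrative):

```ts
// One comparison on the discriminant narrows both the variant and the type
// of `id` (number for plugin configs, UUID string for batch exports).
function describeDestination(destination: DestinationType): string {
    if (destination.backend === PipelineAppBackend.Plugin) {
        // WebhookDestination: numeric id, plugin info available.
        return `plugin config #${destination.id} (${destination.plugin.name})`
    }
    // BatchExportDestination: string id, interval-based frequency.
    return `batch export ${destination.id} (${destination.frequency})`
}
```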
diff --git a/frontend/src/scenes/pipeline/pipelineAppLogic.tsx b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx
index b7f3b0110ae58..fa635fb146f8e 100644
--- a/frontend/src/scenes/pipeline/pipelineAppLogic.tsx
+++ b/frontend/src/scenes/pipeline/pipelineAppLogic.tsx
@@ -1,56 +1,110 @@
-import { actions, kea, key, path, props, reducers, selectors } from 'kea'
+import { actions, connect, kea, key, path, props, reducers, selectors } from 'kea'
+import { forms } from 'kea-forms'
import { actionToUrl, urlToAction } from 'kea-router'
+import { capitalizeFirstLetter } from 'lib/utils'
import { Scene } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
-import { Breadcrumb, PipelineAppTabs } from '~/types'
+import { Breadcrumb, PipelineAppKind, PipelineAppTab, PluginConfigTypeNew, PluginType } from '~/types'
+import { PipelineAppBackend, pipelineDestinationsLogic } from './destinationsLogic'
import type { pipelineAppLogicType } from './pipelineAppLogicType'
export interface PipelineAppLogicProps {
- id: number
+ id: number | string
+    /** Might be null if a non-existent kind is set in the URL. */
+ kind: PipelineAppKind | null
}
export const pipelineAppLogic = kea<pipelineAppLogicType>([
props({} as PipelineAppLogicProps),
- key(({ id }) => id),
+ key(({ kind, id }) => `${kind}:${id}`),
path((id) => ['scenes', 'pipeline', 'pipelineAppLogic', id]),
+ connect(() => ({
+ values: [pipelineDestinationsLogic, ['plugins', 'pluginsLoading', 'pluginConfigs', 'pluginConfigsLoading']],
+ })),
actions({
- setCurrentTab: (tab: PipelineAppTabs = PipelineAppTabs.Configuration) => ({ tab }),
+ setCurrentTab: (tab: PipelineAppTab = PipelineAppTab.Configuration) => ({ tab }),
}),
reducers({
currentTab: [
- PipelineAppTabs.Configuration as PipelineAppTabs,
+ PipelineAppTab.Configuration as PipelineAppTab,
{
setCurrentTab: (_, { tab }) => tab,
},
],
}),
- selectors({
+ selectors(() => ({
breadcrumbs: [
- () => [],
- (): Breadcrumb[] => [
+ (s, p) => [p.id, p.kind, s.maybePluginConfig],
+ (id, kind, maybePluginConfig): Breadcrumb[] => [
{
key: Scene.Pipeline,
- name: 'Pipeline',
+ name: 'Data pipeline',
+ path: urls.pipeline(),
+ },
+ {
+ key: kind || 'unknown',
+ name: kind ? capitalizeFirstLetter(kind) : 'Unknown',
path: urls.pipeline(),
},
{
- key: 'todo',
- name: 'App name',
+ key: [Scene.PipelineApp, id],
+ name: maybePluginConfig ? maybePluginConfig.name || 'Unnamed' : 'Unknown',
},
],
],
+ appBackend: [
+ (_, p) => [p.id],
+ (id): PipelineAppBackend =>
+ typeof id === 'string' ? PipelineAppBackend.BatchExport : PipelineAppBackend.Plugin,
+ ],
+ loading: [
+ (s) => [s.appBackend, s.pluginConfigsLoading, s.pluginsLoading],
+ (appBackend, pluginConfigsLoading, pluginsLoading): boolean => {
+ if (appBackend === PipelineAppBackend.BatchExport) {
+ return false // TODO: Support loading state for batch exports
+ }
+ return pluginConfigsLoading || pluginsLoading
+ },
+ ],
+ maybePluginConfig: [
+ (s, p) => [s.pluginConfigs, s.appBackend, p.id],
+ (pluginConfigs, appBackend, maybePluginConfigId): PluginConfigTypeNew | null => {
+            if (appBackend !== PipelineAppBackend.Plugin) {
+ return null
+ }
+ return pluginConfigs[maybePluginConfigId] || null
+ },
+ ],
+ maybePlugin: [
+ (s) => [s.plugins, s.maybePluginConfig],
+ (plugins, maybePluginConfig): PluginType | null => {
+ if (!maybePluginConfig) {
+ return null
+ }
+ return plugins[maybePluginConfig.plugin] || null
+ },
+ ],
+ kind: [(_, p) => [p.kind], (kind) => kind],
+ })),
+ forms({
+ configuration: {
+            // TODO: Validate that required fields are filled in
+ submit: () => {
+ // TODO
+ },
+ },
}),
actionToUrl(({ values, props }) => {
return {
- setCurrentTab: () => [urls.pipelineApp(props.id, values.currentTab)],
+ setCurrentTab: () => [urls.pipelineApp(props.kind as PipelineAppKind, props.id, values.currentTab)],
}
}),
urlToAction(({ actions, values }) => ({
- '/pipeline/:id/:tab': ({ tab }) => {
- if (tab !== values.currentTab) {
- actions.setCurrentTab(tab as PipelineAppTabs)
+ '/pipeline/:kindTab/:id/:appTab': ({ appTab }) => {
+ if (appTab !== values.currentTab && Object.values(PipelineAppTab).includes(appTab as PipelineAppTab)) {
+ actions.setCurrentTab(appTab as PipelineAppTab)
}
},
})),
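The logic leans on an id-type convention to pick a backend: plugin configs use numeric ids while batch exports use UUID strings. A sketch of the `appBackend` derivation, reusing an id from the mock above (assuming the convention holds wherever the logic is mounted):

```ts
const backendFor = (id: number | string): PipelineAppBackend =>
    typeof id === 'string' ? PipelineAppBackend.BatchExport : PipelineAppBackend.Plugin

backendFor(123) // PipelineAppBackend.Plugin
backendFor('018a709a-9d6d-0001-ed23-4e1f795a8528') // PipelineAppBackend.BatchExport
```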
diff --git a/frontend/src/scenes/pipeline/pipelineAppLogsLogic.tsx b/frontend/src/scenes/pipeline/pipelineAppLogsLogic.tsx
new file mode 100644
index 0000000000000..78d3a28fad30f
--- /dev/null
+++ b/frontend/src/scenes/pipeline/pipelineAppLogsLogic.tsx
@@ -0,0 +1,235 @@
+import { LemonTableColumns } from '@posthog/lemon-ui'
+import { actions, connect, events, kea, key, listeners, path, props, reducers, selectors } from 'kea'
+import { loaders } from 'kea-loaders'
+import { LOGS_PORTION_LIMIT } from 'lib/constants'
+import { dayjs } from 'lib/dayjs'
+import { PipelineAppBackend } from 'scenes/pipeline/destinationsLogic'
+import { pipelineAppLogic, PipelineAppLogicProps } from 'scenes/pipeline/pipelineAppLogic'
+
+import api from '~/lib/api'
+import { BatchExportLogEntry, PluginLogEntry } from '~/types'
+
+import { teamLogic } from '../teamLogic'
+import type { pipelineAppLogsLogicType } from './pipelineAppLogsLogicType'
+import { LogLevelDisplay, logLevelsToTypeFilters, LogTypeDisplay } from './utils'
+
+export type LogEntry = BatchExportLogEntry | PluginLogEntry
+
+export enum PipelineAppLogLevel {
+ Debug = 'DEBUG',
+ Log = 'LOG',
+ Info = 'INFO',
+ Warning = 'WARNING',
+ Error = 'ERROR',
+}
+
+export const pipelineAppLogsLogic = kea<pipelineAppLogsLogicType>([
+ props({} as PipelineAppLogicProps),
+ key(({ id }: PipelineAppLogicProps) => id),
+ path((key) => ['scenes', 'pipeline', 'pipelineAppLogsLogic', key]),
+ connect((props: PipelineAppLogicProps) => ({
+ values: [teamLogic(), ['currentTeamId'], pipelineAppLogic(props), ['appBackend']],
+ })),
+ actions({
+ setSelectedLogLevels: (levels: PipelineAppLogLevel[]) => ({
+ levels,
+ }),
+ setSearchTerm: (searchTerm: string) => ({ searchTerm }),
+ clearBackgroundLogs: true,
+ markLogsEnd: true,
+ }),
+ loaders(({ props: { id }, values, actions, cache }) => ({
+ logs: {
+ __default: [] as PluginLogEntry[] | BatchExportLogEntry[],
+ loadLogs: async () => {
+ let results: LogEntry[]
+ if (values.appBackend === PipelineAppBackend.BatchExport) {
+ results = await api.batchExportLogs.search(
+ id as string,
+ values.currentTeamId,
+ values.searchTerm,
+ values.selectedLogLevels
+ )
+ } else {
+ results = await api.pluginLogs.search(
+ id as number,
+ values.currentTeamId,
+ values.searchTerm,
+ logLevelsToTypeFilters(values.selectedLogLevels)
+ )
+ }
+
+ if (!cache.pollingInterval) {
+ cache.pollingInterval = setInterval(actions.pollBackgroundLogs, 5000)
+ }
+ actions.clearBackgroundLogs()
+ return results
+ },
+ loadMoreLogs: async () => {
+ let results: LogEntry[]
+ if (values.appBackend === PipelineAppBackend.BatchExport) {
+ results = await api.batchExportLogs.search(
+ id as string,
+ values.currentTeamId,
+ values.searchTerm,
+ values.selectedLogLevels,
+ values.trailingEntry as BatchExportLogEntry | null
+ )
+ } else {
+ results = await api.pluginLogs.search(
+ id as number,
+ values.currentTeamId,
+ values.searchTerm,
+ logLevelsToTypeFilters(values.selectedLogLevels),
+ values.trailingEntry as PluginLogEntry | null
+ )
+ }
+
+ if (results.length < LOGS_PORTION_LIMIT) {
+ actions.markLogsEnd()
+ }
+ return [...values.logs, ...results]
+ },
+ revealBackground: () => {
+ const newArray = [...values.backgroundLogs, ...values.logs]
+ actions.clearBackgroundLogs()
+ return newArray
+ },
+ },
+ backgroundLogs: {
+ __default: [] as PluginLogEntry[] | BatchExportLogEntry[],
+ pollBackgroundLogs: async () => {
+ // we fetch new logs in the background and allow the user to expand
+ // them into the array of visible logs
+ if (values.logsLoading) {
+ return values.backgroundLogs
+ }
+
+ let results: LogEntry[]
+ if (values.appBackend === PipelineAppBackend.BatchExport) {
+ results = await api.batchExportLogs.search(
+ id as string,
+ values.currentTeamId,
+ values.searchTerm,
+ values.selectedLogLevels,
+ null,
+ values.leadingEntry as BatchExportLogEntry | null
+ )
+ } else {
+ results = await api.pluginLogs.search(
+ id as number,
+ values.currentTeamId,
+ values.searchTerm,
+ logLevelsToTypeFilters(values.selectedLogLevels),
+ null,
+ values.leadingEntry as PluginLogEntry | null
+ )
+ }
+
+ return [...results, ...values.backgroundLogs]
+ },
+ },
+ })),
+ reducers({
+ selectedLogLevels: [
+ Object.values(PipelineAppLogLevel).filter((level) => level !== 'DEBUG'),
+ {
+ setSelectedLogLevels: (_, { levels }) => levels,
+ },
+ ],
+ backgroundLogs: [
+ [] as PluginLogEntry[] | BatchExportLogEntry[],
+ {
+ clearBackgroundLogs: () => [],
+ },
+ ],
+ searchTerm: [
+ '',
+ {
+ setSearchTerm: (_, { searchTerm }) => searchTerm,
+ },
+ ],
+ isThereMoreToLoad: [
+ true,
+ {
+ loadLogsSuccess: (_, { logs }) => logs.length >= LOGS_PORTION_LIMIT,
+ markLogsEnd: () => false,
+ },
+ ],
+ }),
+ selectors({
+ leadingEntry: [
+ (s) => [s.logs, s.backgroundLogs],
+ (logs: LogEntry[], backgroundLogs: LogEntry[]): LogEntry | null => {
+ if (backgroundLogs.length) {
+ return backgroundLogs[0]
+ }
+ if (logs.length) {
+ return logs[0]
+ }
+ return null
+ },
+ ],
+ trailingEntry: [
+ (s) => [s.logs, s.backgroundLogs],
+ (logs: LogEntry[], backgroundLogs: LogEntry[]): LogEntry | null => {
+ if (logs.length) {
+ return logs[logs.length - 1]
+ }
+ if (backgroundLogs.length) {
+ return backgroundLogs[backgroundLogs.length - 1]
+ }
+ return null
+ },
+ ],
+ columns: [
+ (s) => [s.appBackend],
+        (appBackend): LemonTableColumns<LogEntry> => {
+ return [
+ {
+ title: 'Timestamp',
+ key: 'timestamp',
+ dataIndex: 'timestamp',
+ render: (timestamp: string) => dayjs(timestamp).format('YYYY-MM-DD HH:mm:ss.SSS UTC'),
+ },
+ {
+ title: appBackend === PipelineAppBackend.BatchExport ? 'Run Id' : 'Source',
+ dataIndex: appBackend === PipelineAppBackend.BatchExport ? 'run_id' : 'source',
+ key: appBackend === PipelineAppBackend.BatchExport ? 'run_id' : 'source',
+ },
+ {
+ title: 'Level',
+ key: appBackend === PipelineAppBackend.BatchExport ? 'level' : 'type',
+ dataIndex: appBackend === PipelineAppBackend.BatchExport ? 'level' : 'type',
+ render: appBackend === PipelineAppBackend.BatchExport ? LogLevelDisplay : LogTypeDisplay,
+ },
+ {
+ title: 'Message',
+ key: 'message',
+ dataIndex: 'message',
+                    render: (message: string) => <code className="whitespace-pre-wrap">{message}</code>,
+ },
+            ] as LemonTableColumns<LogEntry>
+ },
+ ],
+ }),
+ listeners(({ actions }) => ({
+ setSelectedLogLevels: () => {
+ actions.loadLogs()
+ },
+ setSearchTerm: async ({ searchTerm }, breakpoint) => {
+ if (searchTerm) {
+ await breakpoint(1000)
+ }
+ actions.loadLogs()
+ },
+ })),
+ events(({ actions, cache }) => ({
+ afterMount: () => {
+ actions.loadLogs()
+ },
+ beforeUnmount: () => {
+ clearInterval(cache.pollingInterval)
+ },
+ })),
+])
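The `leadingEntry`/`trailingEntry` selectors encode the pagination contract: logs are held newest-first, so the leading entry bounds the background poll ("only fetch newer than this") and the trailing entry bounds `loadMoreLogs` ("only fetch older than this"). A toy illustration:

```ts
// Toy data; real entries are PluginLogEntry | BatchExportLogEntry.
const logs = [
    { timestamp: '2024-01-11T09:03:23Z', message: 'newest' },
    { timestamp: '2024-01-11T09:02:10Z', message: 'middle' },
    { timestamp: '2024-01-11T09:01:05Z', message: 'oldest' },
]

const leadingEntry = logs[0] // poll for entries newer than 09:03:23
const trailingEntry = logs[logs.length - 1] // page in entries older than 09:01:05
```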
diff --git a/frontend/src/scenes/pipeline/pipelineLogic.tsx b/frontend/src/scenes/pipeline/pipelineLogic.tsx
index 28e5124c44179..3fbb4506afe6e 100644
--- a/frontend/src/scenes/pipeline/pipelineLogic.tsx
+++ b/frontend/src/scenes/pipeline/pipelineLogic.tsx
@@ -3,32 +3,19 @@ import { actionToUrl, urlToAction } from 'kea-router'
import { Scene } from 'scenes/sceneTypes'
import { urls } from 'scenes/urls'
-import { Breadcrumb, PipelineTabs } from '~/types'
+import { Breadcrumb, PipelineTab } from '~/types'
import type { pipelineLogicType } from './pipelineLogicType'
-export const singularName = (tab: PipelineTabs): string => {
+export const humanFriendlyTabName = (tab: PipelineTab): string => {
switch (tab) {
- case PipelineTabs.Filters:
- return 'filter'
- case PipelineTabs.Transformations:
- return 'transformation'
- case PipelineTabs.Destinations:
- return 'destination'
- default:
- return ''
- }
-}
-
-export const humanFriendlyTabName = (tab: PipelineTabs): string => {
- switch (tab) {
- case PipelineTabs.Filters:
+ case PipelineTab.Filters:
return 'Filters'
- case PipelineTabs.Transformations:
+ case PipelineTab.Transformations:
return 'Transformations'
- case PipelineTabs.Destinations:
+ case PipelineTab.Destinations:
return 'Destinations'
- case PipelineTabs.AppsManagement:
+ case PipelineTab.AppsManagement:
return 'Apps management'
}
}
@@ -36,11 +23,11 @@ export const humanFriendlyTabName = (tab: PipelineTabs): string => {
export const pipelineLogic = kea<pipelineLogicType>([
path(['scenes', 'pipeline', 'pipelineLogic']),
actions({
- setCurrentTab: (tab: PipelineTabs = PipelineTabs.Destinations) => ({ tab }),
+ setCurrentTab: (tab: PipelineTab = PipelineTab.Destinations) => ({ tab }),
}),
reducers({
currentTab: [
- PipelineTabs.Destinations as PipelineTabs,
+ PipelineTab.Destinations as PipelineTab,
{
setCurrentTab: (_, { tab }) => tab,
},
@@ -68,7 +55,7 @@ export const pipelineLogic = kea<pipelineLogicType>([
urlToAction(({ actions, values }) => ({
'/pipeline/:tab': ({ tab }) => {
if (tab !== values.currentTab) {
- actions.setCurrentTab(tab as PipelineTabs)
+ actions.setCurrentTab(tab as PipelineTab)
}
},
})),
diff --git a/frontend/src/scenes/pipeline/transformationsLogic.tsx b/frontend/src/scenes/pipeline/transformationsLogic.tsx
index 63afc8441bfba..791c5e0e0c565 100644
--- a/frontend/src/scenes/pipeline/transformationsLogic.tsx
+++ b/frontend/src/scenes/pipeline/transformationsLogic.tsx
@@ -53,7 +53,7 @@ export const pipelineTransformationsLogic = kea<pipelineTransformationsLogicType>([
            loadPluginConfigs: async () => {
const res: PluginConfigTypeNew[] = await api.loadPaginatedResults(
- `api/projects/${values.currentTeamId}/pipeline_transformations_configs`
+ `api/projects/${values.currentTeamId}/pipeline_transformation_configs`
)
return Object.fromEntries(res.map((pluginConfig) => [pluginConfig.id, pluginConfig]))
diff --git a/frontend/src/scenes/pipeline/utils.tsx b/frontend/src/scenes/pipeline/utils.tsx
index c861032cc1f30..51d2914e346cf 100644
--- a/frontend/src/scenes/pipeline/utils.tsx
+++ b/frontend/src/scenes/pipeline/utils.tsx
@@ -5,7 +5,9 @@ import { Tooltip } from 'lib/lemon-ui/Tooltip'
import posthog from 'posthog-js'
import { PluginImage, PluginImageSize } from 'scenes/plugins/plugin/PluginImage'
-import { PluginConfigTypeNew, PluginType } from '~/types'
+import { BatchExportConfiguration, PluginConfigTypeNew, PluginLogEntryType, PluginType } from '~/types'
+
+import { PipelineAppLogLevel } from './pipelineAppLogsLogic'
export function capturePluginEvent(event: string, plugin: PluginType, pluginConfig: PluginConfigTypeNew): void {
posthog.capture(event, {
@@ -14,6 +16,13 @@ export function capturePluginEvent(event: string, plugin: PluginType, pluginConf
plugin_config_id: pluginConfig.id,
})
}
+export function captureBatchExportEvent(event: string, batchExport: BatchExportConfiguration): void {
+ posthog.capture(event, {
+ batch_export_id: batchExport.id,
+ batch_export_name: batchExport.name,
+ batch_export_destination_type: batchExport.destination.type,
+ })
+}
const PAGINATION_DEFAULT_MAX_PAGES = 10
export async function loadPaginatedResults(
@@ -68,3 +77,68 @@ export function RenderApp({ plugin, imageSize }: RenderAppProps): JSX.Element {
)
}
+
+export const logLevelToTypeFilter = (level: PipelineAppLogLevel): PluginLogEntryType => {
+ switch (level) {
+ case PipelineAppLogLevel.Debug:
+ return PluginLogEntryType.Debug
+ case PipelineAppLogLevel.Error:
+ return PluginLogEntryType.Error
+ case PipelineAppLogLevel.Info:
+ return PluginLogEntryType.Info
+ case PipelineAppLogLevel.Log:
+ return PluginLogEntryType.Log
+ case PipelineAppLogLevel.Warning:
+ return PluginLogEntryType.Warn
+ default:
+ throw new Error('unknown log level')
+ }
+}
+
+export const logLevelsToTypeFilters = (levels: PipelineAppLogLevel[]): PluginLogEntryType[] =>
+ levels.map((l) => logLevelToTypeFilter(l))
+
+export const typeToLogLevel = (type: PluginLogEntryType): PipelineAppLogLevel => {
+ switch (type) {
+ case PluginLogEntryType.Debug:
+ return PipelineAppLogLevel.Debug
+ case PluginLogEntryType.Error:
+ return PipelineAppLogLevel.Error
+ case PluginLogEntryType.Info:
+ return PipelineAppLogLevel.Info
+ case PluginLogEntryType.Log:
+ return PipelineAppLogLevel.Log
+ case PluginLogEntryType.Warn:
+ return PipelineAppLogLevel.Warning
+ default:
+ throw new Error('unknown log type')
+ }
+}
+
+export function LogLevelDisplay(level: PipelineAppLogLevel): JSX.Element {
+ let color: string | undefined
+ switch (level) {
+ case PipelineAppLogLevel.Debug:
+ color = 'text-muted'
+ break
+ case PipelineAppLogLevel.Log:
+ color = 'text-default'
+ break
+ case PipelineAppLogLevel.Info:
+ color = 'text-primary'
+ break
+ case PipelineAppLogLevel.Warning:
+ color = 'text-warning'
+ break
+ case PipelineAppLogLevel.Error:
+ color = 'text-danger'
+ break
+ default:
+ break
+ }
+    return <span className={color}>{level}</span>
+}
+
+export function LogTypeDisplay(type: PluginLogEntryType): JSX.Element {
+ return LogLevelDisplay(typeToLogLevel(type))
+}
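`logLevelToTypeFilter` and `typeToLogLevel` are mutual inverses over the shared members, which is what lets plugin log types and batch export log levels flow through one display path; for example:

```ts
const level = typeToLogLevel(PluginLogEntryType.Warn) // PipelineAppLogLevel.Warning
logLevelToTypeFilter(level) === PluginLogEntryType.Warn // true
```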
diff --git a/frontend/src/scenes/plugins/AppsScene.tsx b/frontend/src/scenes/plugins/AppsScene.tsx
index 0df0f23bc0e79..ba7da3db6bda2 100644
--- a/frontend/src/scenes/plugins/AppsScene.tsx
+++ b/frontend/src/scenes/plugins/AppsScene.tsx
@@ -42,7 +42,6 @@ export function AppsScene(): JSX.Element | null {
     return (
         <>
-            <LogsDrawer />
diff --git a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx
--- a/frontend/src/scenes/plugins/edit/PluginDrawer.tsx
+++ b/frontend/src/scenes/plugins/edit/PluginDrawer.tsx
     const [invisibleFields, setInvisibleFields] = useState<string[]>([])
     const [requiredFields, setRequiredFields] = useState<string[]>([])
+ const updateInvisibleAndRequiredFields = (): void => {
+ setInvisibleFields(editingPlugin ? determineInvisibleFields(form.getFieldValue, editingPlugin) : [])
+ setRequiredFields(editingPlugin ? determineRequiredFields(form.getFieldValue, editingPlugin) : [])
+ }
+
useEffect(() => {
if (editingPlugin) {
form.setFieldsValue({
@@ -75,59 +85,6 @@ export function PluginDrawer(): JSX.Element {
updateInvisibleAndRequiredFields()
}, [editingPlugin?.id, editingPlugin?.config_schema])
- const updateInvisibleAndRequiredFields = (): void => {
- determineAndSetInvisibleFields()
- determineAndSetRequiredFields()
- }
-
- const determineAndSetInvisibleFields = (): void => {
- const fieldsToSetAsInvisible = []
- for (const field of Object.values(getConfigSchemaArray(editingPlugin?.config_schema || {}))) {
- if (!field.visible_if || !field.key) {
- continue
- }
- const shouldBeVisible = field.visible_if.every(
- ([targetFieldName, targetFieldValue]: Array) =>
- doFieldRequirementsMatch(form, targetFieldName, targetFieldValue)
- )
-
- if (!shouldBeVisible) {
- fieldsToSetAsInvisible.push(field.key)
- }
- }
- setInvisibleFields(fieldsToSetAsInvisible)
- }
-
- const determineAndSetRequiredFields = (): void => {
- const fieldsToSetAsRequired = []
- for (const field of Object.values(getConfigSchemaArray(editingPlugin?.config_schema || {}))) {
- if (!field.required_if || !Array.isArray(field.required_if) || !field.key) {
- continue
- }
- const shouldBeRequired = field.required_if.every(
- ([targetFieldName, targetFieldValue]: Array) =>
- doFieldRequirementsMatch(form, targetFieldName, targetFieldValue)
- )
- if (shouldBeRequired) {
- fieldsToSetAsRequired.push(field.key)
- }
- }
-
- setRequiredFields(fieldsToSetAsRequired)
- }
-
- const isValidChoiceConfig = (fieldConfig: PluginConfigChoice): boolean => {
- return (
- Array.isArray(fieldConfig.choices) &&
- !!fieldConfig.choices.length &&
- !fieldConfig.choices.find((c) => typeof c !== 'string') &&
- !fieldConfig.secret
- )
- }
-
- const isValidField = (fieldConfig: PluginConfigSchema): boolean =>
- fieldConfig.type !== 'choice' || isValidChoiceConfig(fieldConfig)
-
return (
<>
diff --git a/frontend/src/scenes/plugins/plugin/LogsDrawer.tsx b/frontend/src/scenes/plugins/plugin/LogsDrawer.tsx
deleted file mode 100644
index 432294928549a..0000000000000
--- a/frontend/src/scenes/plugins/plugin/LogsDrawer.tsx
+++ /dev/null
@@ -1,25 +0,0 @@
-import { Drawer } from 'antd'
-import { useActions, useValues } from 'kea'
-
-import { pluginsLogic } from '../pluginsLogic'
-import { PluginLogs } from './PluginLogs'
-
-export function LogsDrawer(): JSX.Element {
- const { showingLogsPlugin, lastShownLogsPlugin } = useValues(pluginsLogic)
- const { hidePluginLogs } = useActions(pluginsLogic)
-
-    return (
-        <Drawer visible={!!showingLogsPlugin} onClose={hidePluginLogs}>
-            {!!lastShownLogsPlugin?.pluginConfig.id && (
-                <PluginLogs pluginConfigId={lastShownLogsPlugin.pluginConfig.id} />
-            )}
-        </Drawer>
-    )
-}
diff --git a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx b/frontend/src/scenes/plugins/plugin/PluginLogs.tsx
deleted file mode 100644
index 5100f2541d9bb..0000000000000
--- a/frontend/src/scenes/plugins/plugin/PluginLogs.tsx
+++ /dev/null
@@ -1,129 +0,0 @@
-import { LemonButton, LemonCheckbox, LemonInput, LemonTable, LemonTableColumns } from '@posthog/lemon-ui'
-import { useActions, useValues } from 'kea'
-import { LOGS_PORTION_LIMIT } from 'lib/constants'
-import { dayjs } from 'lib/dayjs'
-import { pluralize } from 'lib/utils'
-
-import { PluginLogEntryType } from '../../../types'
-import { pluginLogsLogic, PluginLogsProps } from './pluginLogsLogic'
-
-function PluginLogEntryTypeDisplay(type: PluginLogEntryType): JSX.Element {
- let color: string | undefined
- switch (type) {
- case PluginLogEntryType.Debug:
- color = 'text-muted'
- break
- case PluginLogEntryType.Log:
- color = 'text-default'
- break
- case PluginLogEntryType.Info:
- color = 'text-primary'
- break
- case PluginLogEntryType.Warn:
- color = 'text-warning'
- break
- case PluginLogEntryType.Error:
- color = 'text-danger'
- break
- default:
- break
- }
-    return <span className={color}>{type}</span>
-}
-
-const columns: LemonTableColumns<Record<string, any>> = [
- {
- title: 'Timestamp',
- key: 'timestamp',
- dataIndex: 'timestamp',
- render: (timestamp: string) => dayjs(timestamp).format('YYYY-MM-DD HH:mm:ss.SSS UTC'),
- },
- {
- title: 'Source',
- dataIndex: 'source',
- key: 'source',
- },
- {
- title: 'Type',
- key: 'type',
- dataIndex: 'type',
- render: PluginLogEntryTypeDisplay,
- },
- {
- title: 'Message',
- key: 'message',
- dataIndex: 'message',
-        render: (message: string) => <code className="whitespace-pre-wrap">{message}</code>,
- },
-]
-
-export function PluginLogs({ pluginConfigId }: PluginLogsProps): JSX.Element {
- const logic = pluginLogsLogic({ pluginConfigId })
-
- const { pluginLogs, pluginLogsLoading, pluginLogsBackground, isThereMoreToLoad, pluginLogsTypes } = useValues(logic)
- const { revealBackground, loadPluginLogsMore, setPluginLogsTypes, setSearchTerm } = useActions(logic)
-
-    return (
-        <div className="flex flex-col gap-2">
-            <LemonInput type="search" placeholder="Search..." onChange={setSearchTerm} />
-            <div className="flex items-center gap-4">
-                <span>Show logs of type:</span>
-                {Object.values(PluginLogEntryType).map((type) => {
-                    return (
-                        <LemonCheckbox
-                            key={type}
-                            label={type}
-                            checked={pluginLogsTypes.includes(type)}
-                            onChange={(checked) => {
-                                const newPluginLogsTypes = checked
-                                    ? [...pluginLogsTypes, type]
-                                    : pluginLogsTypes.filter((t) => t != type)
-                                setPluginLogsTypes(newPluginLogsTypes)
-                            }}
-                        />
-                    )
-                })}
-            </div>
-            <LemonButton onClick={revealBackground} loading={pluginLogsLoading} type="secondary">
-                {pluginLogsBackground.length
-                    ? `Load ${pluralize(pluginLogsBackground.length, 'newer entry', 'newer entries')}`
-                    : 'No new entries'}
-            </LemonButton>
-            <LemonTable dataSource={pluginLogs} columns={columns} loading={pluginLogsLoading} />
-            {!!pluginLogs.length && (
-                <LemonButton onClick={loadPluginLogsMore} loading={pluginLogsLoading} type="secondary">
-                    {isThereMoreToLoad ? `Load up to ${LOGS_PORTION_LIMIT} older entries` : 'No older entries'}
-                </LemonButton>
-            )}
-        </div>
-    )
-}
diff --git a/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts b/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts
deleted file mode 100644
index c8db5be4b0f0a..0000000000000
--- a/frontend/src/scenes/plugins/plugin/pluginLogsLogic.ts
+++ /dev/null
@@ -1,165 +0,0 @@
-import { CheckboxValueType } from 'antd/lib/checkbox/Group'
-import { actions, connect, events, kea, key, listeners, path, props, reducers, selectors } from 'kea'
-import { loaders } from 'kea-loaders'
-import { LOGS_PORTION_LIMIT } from 'lib/constants'
-
-import api from '~/lib/api'
-import { PluginLogEntry, PluginLogEntryType } from '~/types'
-
-import { teamLogic } from '../../teamLogic'
-import type { pluginLogsLogicType } from './pluginLogsLogicType'
-
-export interface PluginLogsProps {
- pluginConfigId: number
-}
-
-export const pluginLogsLogic = kea<pluginLogsLogicType>([
- props({} as PluginLogsProps),
- key(({ pluginConfigId }: PluginLogsProps) => pluginConfigId),
- path((key) => ['scenes', 'plugins', 'plugin', 'pluginLogsLogic', key]),
- connect({
- values: [teamLogic, ['currentTeamId']],
- }),
- actions({
- clearPluginLogsBackground: true,
- markLogsEnd: true,
- setPluginLogsTypes: (typeFilters: CheckboxValueType[]) => ({
- typeFilters,
- }),
- setSearchTerm: (searchTerm: string) => ({ searchTerm }),
- }),
- loaders(({ props: { pluginConfigId }, values, actions, cache }) => ({
- pluginLogs: {
- __default: [] as PluginLogEntry[],
- loadPluginLogs: async () => {
- const results = await api.pluginLogs.search(
- pluginConfigId,
- values.currentTeamId,
- values.searchTerm,
- values.typeFilters
- )
- if (!cache.pollingInterval) {
- cache.pollingInterval = setInterval(actions.loadPluginLogsBackgroundPoll, 2000)
- }
- actions.clearPluginLogsBackground()
- return results
- },
- loadPluginLogsMore: async () => {
- const results = await api.pluginLogs.search(
- pluginConfigId,
- values.currentTeamId,
- values.searchTerm,
- values.typeFilters,
- values.trailingEntry
- )
-
- if (results.length < LOGS_PORTION_LIMIT) {
- actions.markLogsEnd()
- }
- return [...values.pluginLogs, ...results]
- },
- revealBackground: () => {
- const newArray = [...values.pluginLogsBackground, ...values.pluginLogs]
- actions.clearPluginLogsBackground()
- return newArray
- },
- },
- pluginLogsBackground: {
- __default: [] as PluginLogEntry[],
- loadPluginLogsBackgroundPoll: async () => {
- if (values.pluginLogsLoading) {
- return values.pluginLogsBackground
- }
-
- const results = await api.pluginLogs.search(
- pluginConfigId,
- values.currentTeamId,
- values.searchTerm,
- values.typeFilters,
- null,
- values.leadingEntry
- )
-
- return [...results, ...values.pluginLogsBackground]
- },
- },
- })),
- reducers({
- pluginLogsTypes: [
- Object.values(PluginLogEntryType).filter((type) => type !== 'DEBUG'),
- {
- setPluginLogsTypes: (_, { typeFilters }) => typeFilters.map((tf) => tf as PluginLogEntryType),
- },
- ],
- pluginLogsBackground: [
- [] as PluginLogEntry[],
- {
- clearPluginLogsBackground: () => [],
- },
- ],
- searchTerm: [
- '',
- {
- setSearchTerm: (_, { searchTerm }) => searchTerm,
- },
- ],
- typeFilters: [
- Object.values(PluginLogEntryType).filter((type) => type !== 'DEBUG') as CheckboxValueType[],
- {
- setPluginLogsTypes: (_, { typeFilters }) => typeFilters || [],
- },
- ],
- isThereMoreToLoad: [
- true,
- {
- loadPluginLogsSuccess: (_, { pluginLogs }) => pluginLogs.length >= LOGS_PORTION_LIMIT,
- markLogsEnd: () => false,
- },
- ],
- }),
- selectors(({ selectors }) => ({
- leadingEntry: [
- () => [selectors.pluginLogs, selectors.pluginLogsBackground],
- (pluginLogs: PluginLogEntry[], pluginLogsBackground: PluginLogEntry[]): PluginLogEntry | null => {
- if (pluginLogsBackground.length) {
- return pluginLogsBackground[0]
- }
- if (pluginLogs.length) {
- return pluginLogs[0]
- }
- return null
- },
- ],
- trailingEntry: [
- () => [selectors.pluginLogs, selectors.pluginLogsBackground],
- (pluginLogs: PluginLogEntry[], pluginLogsBackground: PluginLogEntry[]): PluginLogEntry | null => {
- if (pluginLogs.length) {
- return pluginLogs[pluginLogs.length - 1]
- }
- if (pluginLogsBackground.length) {
- return pluginLogsBackground[pluginLogsBackground.length - 1]
- }
- return null
- },
- ],
- })),
- listeners(({ actions }) => ({
- setPluginLogsTypes: () => {
- actions.loadPluginLogs()
- },
- setSearchTerm: async ({ searchTerm }, breakpoint) => {
- if (searchTerm) {
- await breakpoint(1000)
- }
- actions.loadPluginLogs()
- },
- })),
- events(({ actions, cache }) => ({
- afterMount: () => {
- actions.loadPluginLogs()
- },
- beforeUnmount: () => {
- clearInterval(cache.pollingInterval)
- },
- })),
-])
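The deleted logic's polling scheme is the part worth capturing: a foreground loader fetches the visible page, a background loader polls every 2 seconds using the newest known entry as a cursor, and polled entries stay buffered until the user asks to reveal them. A minimal standalone sketch of that pattern outside kea, assuming a hypothetical `fetchLogs(after?)` in place of `api.pluginLogs.search`:

```ts
type LogEntry = { id: string; timestamp: string; message: string }

class LogPoller {
    private logs: LogEntry[] = []
    private background: LogEntry[] = []
    private interval?: ReturnType<typeof setInterval>

    constructor(private fetchLogs: (after?: LogEntry) => Promise<LogEntry[]>) {}

    async start(): Promise<void> {
        this.logs = await this.fetchLogs()
        // Poll every 2 seconds, mirroring the deleted cache.pollingInterval
        this.interval = setInterval(() => void this.poll(), 2000)
    }

    private async poll(): Promise<void> {
        // The newest entry seen so far acts as the "leadingEntry" cursor
        const leading = this.background[0] ?? this.logs[0]
        const fresh = await this.fetchLogs(leading)
        this.background = [...fresh, ...this.background]
    }

    // Mirrors revealBackground: buffered entries merge in only on user action
    reveal(): LogEntry[] {
        this.logs = [...this.background, ...this.logs]
        this.background = []
        return this.logs
    }

    stop(): void {
        clearInterval(this.interval)
    }
}
```

Keeping `pluginLogsBackground` separate instead of appending to `pluginLogs` directly is what keeps the table stable while new rows accumulate.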
diff --git a/frontend/src/scenes/plugins/pluginActivityDescriptions.tsx b/frontend/src/scenes/plugins/pluginActivityDescriptions.tsx
index a4889604735c1..0f02b94557722 100644
--- a/frontend/src/scenes/plugins/pluginActivityDescriptions.tsx
+++ b/frontend/src/scenes/plugins/pluginActivityDescriptions.tsx
@@ -9,7 +9,7 @@ import { dayjs } from 'lib/dayjs'
import { ActivityScope } from '~/types'
-import { SECRET_FIELD_VALUE } from './utils'
+import { SECRET_FIELD_VALUE } from '../pipeline/configUtils'
export function pluginActivityDescriber(logItem: ActivityLogItem, asNotification?: boolean): HumanizedChange {
if (logItem.scope !== ActivityScope.PLUGIN && logItem.scope !== ActivityScope.PLUGIN_CONFIG) {
diff --git a/frontend/src/scenes/plugins/pluginsLogic.ts b/frontend/src/scenes/plugins/pluginsLogic.ts
index 34d2744b13279..f769dc366bf12 100644
--- a/frontend/src/scenes/plugins/pluginsLogic.ts
+++ b/frontend/src/scenes/plugins/pluginsLogic.ts
@@ -6,8 +6,8 @@ import api from 'lib/api'
import { lemonToast } from 'lib/lemon-ui/LemonToast/LemonToast'
import posthog from 'posthog-js'
import { frontendAppsLogic } from 'scenes/apps/frontendAppsLogic'
+import { getConfigSchemaArray, getConfigSchemaObject, getPluginConfigFormData } from 'scenes/pipeline/configUtils'
import { createDefaultPluginSource } from 'scenes/plugins/source/createDefaultPluginSource'
-import { getConfigSchemaArray, getConfigSchemaObject, getPluginConfigFormData } from 'scenes/plugins/utils'
import { urls } from 'scenes/urls'
import { userLogic } from 'scenes/userLogic'
@@ -364,12 +364,6 @@ export const pluginsLogic = kea<pluginsLogicType>([
hidePluginLogs: () => null,
},
],
- lastShownLogsPluginId: [
- null as number | null,
- {
- showPluginLogs: (_, { id }) => id,
- },
- ],
searchTerm: [
null as string | null,
{
@@ -524,16 +518,6 @@ export const pluginsLogic = kea<pluginsLogicType>([
(pluginsLoading, repositoryLoading, pluginConfigsLoading) =>
pluginsLoading || repositoryLoading || pluginConfigsLoading,
],
- showingLogsPlugin: [
- (s) => [s.showingLogsPluginId, s.installedPlugins],
- (showingLogsPluginId, installedPlugins) =>
- showingLogsPluginId ? installedPlugins.find((plugin) => plugin.id === showingLogsPluginId) : null,
- ],
- lastShownLogsPlugin: [
- (s) => [s.lastShownLogsPluginId, s.installedPlugins],
- (lastShownLogsPluginId, installedPlugins) =>
- lastShownLogsPluginId ? installedPlugins.find((plugin) => plugin.id === lastShownLogsPluginId) : null,
- ],
filteredUninstalledPlugins: [
(s) => [s.searchTerm, s.uninstalledPlugins],
(searchTerm, uninstalledPlugins) =>
@@ -662,6 +646,8 @@ export const pluginsLogic = kea<pluginsLogicType>([
}
},
generateApiKeysIfNeeded: async ({ form }, breakpoint) => {
+ // TODO: Auto-generated keys for posthogApiKey fields are deprecated
+ // This whole action can be removed at some point
const { editingPlugin } = values
if (!editingPlugin) {
return
diff --git a/frontend/src/scenes/products/productsLogic.tsx b/frontend/src/scenes/products/productsLogic.tsx
index 313e26ea70a1c..4b5a7611e5e5c 100644
--- a/frontend/src/scenes/products/productsLogic.tsx
+++ b/frontend/src/scenes/products/productsLogic.tsx
@@ -2,6 +2,7 @@ import { actions, connect, kea, listeners, path } from 'kea'
import { eventUsageLogic } from 'lib/utils/eventUsageLogic'
import { onboardingLogic } from 'scenes/onboarding/onboardingLogic'
import { teamLogic } from 'scenes/teamLogic'
+import { userLogic } from 'scenes/userLogic'
import { ProductKey } from '~/types'
@@ -11,13 +12,20 @@ export const productsLogic = kea<productsLogicType>([
path(() => ['scenes', 'products', 'productsLogic']),
connect({
actions: [teamLogic, ['updateCurrentTeam'], onboardingLogic, ['setProduct']],
+ values: [userLogic, ['user']],
}),
actions(() => ({
onSelectProduct: (product: ProductKey) => ({ product }),
})),
- listeners(({ actions }) => ({
+ listeners(({ actions, values }) => ({
onSelectProduct: ({ product }) => {
- eventUsageLogic.actions.reportOnboardingProductSelected(product)
+ const includeFirstOnboardingProductOnUserProperties = values.user?.date_joined
+ ? new Date(values.user?.date_joined) > new Date('2024-01-10T00:00:00Z')
+ : false
+ eventUsageLogic.actions.reportOnboardingProductSelected(
+ product,
+ includeFirstOnboardingProductOnUserProperties
+ )
switch (product) {
case ProductKey.PRODUCT_ANALYTICS:
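The new listener gates a user property on sign-up date. A self-contained sketch of the same cutoff check, with a hypothetical helper name (the 2024-01-10 date is the one hard-coded above):

```ts
function joinedAfterCutoff(dateJoined: string | undefined, cutoff = '2024-01-10T00:00:00Z'): boolean {
    // Unknown join dates are treated conservatively as "before the cutoff"
    return dateJoined ? new Date(dateJoined) > new Date(cutoff) : false
}

joinedAfterCutoff('2024-02-01T00:00:00Z') // true
joinedAfterCutoff('2023-12-31T00:00:00Z') // false
joinedAfterCutoff(undefined) // false
```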
diff --git a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx
index bd8b2a69b3223..f8be68cd934f6 100644
--- a/frontend/src/scenes/project-homepage/ProjectHomepage.tsx
+++ b/frontend/src/scenes/project-homepage/ProjectHomepage.tsx
@@ -57,7 +57,7 @@ export function ProjectHomepage(): JSX.Element {
return (
-
+
diff --git a/frontend/src/scenes/saved-insights/SavedInsights.tsx b/frontend/src/scenes/saved-insights/SavedInsights.tsx
index 8bd4e6bdea818..75363350bda54 100644
--- a/frontend/src/scenes/saved-insights/SavedInsights.tsx
+++ b/frontend/src/scenes/saved-insights/SavedInsights.tsx
@@ -520,10 +520,7 @@ export function SavedInsights(): JSX.Element {
return (
-
}
- />
+
} />
setSavedInsightsFilters({ tab })}
diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts
index 95fa8461cd6dc..5e7de9b544a04 100644
--- a/frontend/src/scenes/scenes.ts
+++ b/frontend/src/scenes/scenes.ts
@@ -9,7 +9,7 @@ import { Error404 as Error404Component } from '~/layout/Error404'
import { ErrorNetwork as ErrorNetworkComponent } from '~/layout/ErrorNetwork'
import { ErrorProjectUnavailable as ErrorProjectUnavailableComponent } from '~/layout/ErrorProjectUnavailable'
import { EventsQuery } from '~/queries/schema'
-import { ActivityScope, InsightShortId, PipelineAppTabs, PipelineTabs, PropertyFilterType, ReplayTabs } from '~/types'
+import { ActivityScope, InsightShortId, PropertyFilterType, ReplayTabs } from '~/types'
export const emptySceneParams = { params: {}, searchParams: {}, hashParams: {} }
@@ -476,16 +476,8 @@ export const routes: Record<string, Scene> = {
[urls.personByDistinctId('*', false)]: Scene.Person,
[urls.personByUUID('*', false)]: Scene.Person,
[urls.persons()]: Scene.PersonsManagement,
- [urls.pipeline()]: Scene.Pipeline,
- // One entry for every available tab
- ...(Object.fromEntries(Object.values(PipelineTabs).map((tab) => [urls.pipeline(tab), Scene.Pipeline])) as Record<
- string,
- Scene
- >),
- // One entry for each available tab (key by app config id)
- ...(Object.fromEntries(
- Object.values(PipelineAppTabs).map((tab) => [urls.pipelineApp(':id', tab), Scene.PipelineApp])
-    ) as Record<string, Scene>),
+ [urls.pipeline(':tab')]: Scene.Pipeline,
+ [urls.pipelineApp(':kindTab', ':id', ':appTab')]: Scene.PipelineApp,
[urls.groups(':groupTypeIndex')]: Scene.PersonsManagement,
[urls.group(':groupTypeIndex', ':groupKey', false)]: Scene.Group,
[urls.group(':groupTypeIndex', ':groupKey', false, ':groupTab')]: Scene.Group,
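The route table shrinks because kea-router matches `:tab`-style segments as parameters, so one pattern stands in for the per-enum expansion. A sketch of the equivalence, with the old tab values inlined for illustration:

```ts
// Old approach: one concrete entry per tab value
const explicit = Object.fromEntries(
    ['filters', 'transformations', 'destinations', 'apps-management'].map((tab) => [
        `/pipeline/${tab}`,
        'Pipeline',
    ])
)
// { '/pipeline/filters': 'Pipeline', '/pipeline/transformations': 'Pipeline', ... }

// New approach: one parameterized entry the router resolves at match time
const parameterized = { '/pipeline/:tab': 'Pipeline' }
```

The trade-off is that the parameterized form also matches tab values outside the enum, so the scene itself has to validate the segment.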
diff --git a/frontend/src/scenes/session-recordings/SessionRecordings.tsx b/frontend/src/scenes/session-recordings/SessionRecordings.tsx
index 0c85ffc96c4cc..3bb02e66670bb 100644
--- a/frontend/src/scenes/session-recordings/SessionRecordings.tsx
+++ b/frontend/src/scenes/session-recordings/SessionRecordings.tsx
@@ -60,7 +60,6 @@ export function SessionsRecordings(): JSX.Element {
return (
}
buttons={
<>
{tab === ReplayTabs.Recent && !recordingsDisabled && (
diff --git a/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx b/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx
index 6b735227f6376..33bbdfb3e4acb 100644
--- a/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx
+++ b/frontend/src/scenes/session-recordings/detail/SessionRecordingDetail.tsx
@@ -26,7 +26,7 @@ export function SessionRecordingDetail({ id }: SessionRecordingDetailLogicProps
const { currentTeam } = useValues(teamLogic)
return (
} />
+
{currentTeam && !currentTeam?.session_recording_opt_in ? (
diff --git a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
index 45d2510b16989..845c2789b37f8 100644
--- a/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
+++ b/frontend/src/scenes/session-recordings/playlist/SessionRecordingsPlaylistScene.tsx
@@ -23,9 +23,7 @@ export const scene: SceneExport = {
}
export function SessionRecordingsPlaylistScene(): JSX.Element {
- const { playlist, playlistLoading, pinnedRecordings, hasChanges, derivedName } = useValues(
- sessionRecordingsPlaylistSceneLogic
- )
+ const { playlist, playlistLoading, pinnedRecordings, hasChanges } = useValues(sessionRecordingsPlaylistSceneLogic)
const { setFilters, updatePlaylist, duplicatePlaylist, deletePlaylist, onPinnedChange } = useActions(
sessionRecordingsPlaylistSceneLogic
)
@@ -66,17 +64,6 @@ export function SessionRecordingsPlaylistScene(): JSX.Element {
// Margin bottom hacks the fact that our wrapping container has an annoyingly large padding
updatePlaylist({ short_id: playlist.short_id, name: value })}
- saveOnBlur={true}
- maxLength={400}
- data-attr="playlist-name"
- />
- }
buttons={
{
updateSearchParams: true,
})
logic.mount()
- // the logic persists its filters
- logic.actions.resetFilters()
})
describe('core assumptions', () => {
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
index 90d9562a4bf7d..961178e00f9f0 100644
--- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
+++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts
@@ -365,7 +365,6 @@ export const sessionRecordingsPlaylistLogic = kea<sessionRecordingsPlaylistLogicType>([
...state,
diff --git a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx
index 6eaa28fa0f252..bf38e5d9133ad 100644
--- a/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx
+++ b/frontend/src/scenes/settings/organization/VerifiedDomains/VerifyDomainModal.tsx
@@ -43,7 +43,7 @@ export function VerifyDomainModal(): JSX.Element {
@@ -55,6 +55,7 @@ export function VerifyDomainModal(): JSX.Element {
{domainBeingVerified && (
)}
@@ -62,7 +63,7 @@ export function VerifyDomainModal(): JSX.Element {
diff --git a/frontend/src/scenes/surveys/Survey.tsx b/frontend/src/scenes/surveys/Survey.tsx
index 12bebeb2a9c4a..d45c3dbfd2aa8 100644
--- a/frontend/src/scenes/surveys/Survey.tsx
+++ b/frontend/src/scenes/surveys/Survey.tsx
@@ -54,7 +54,6 @@ export function SurveyForm({ id }: { id: string }): JSX.Element {
return (
Create blank survey
diff --git a/frontend/src/scenes/surveys/SurveyView.tsx b/frontend/src/scenes/surveys/SurveyView.tsx
index 9ac71df5662be..7cc725516e593 100644
--- a/frontend/src/scenes/surveys/SurveyView.tsx
+++ b/frontend/src/scenes/surveys/SurveyView.tsx
@@ -53,7 +53,6 @@ export function SurveyView({ id }: { id: string }): JSX.Element {
) : (
<>
-
+
diff --git a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx
index 38c1476bcf348..f0ef315e075b4 100644
--- a/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx
+++ b/frontend/src/scenes/trends/viz/ActionsLineGraph.tsx
@@ -10,7 +10,7 @@ import { insightLogic } from 'scenes/insights/insightLogic'
import { cohortsModel } from '~/models/cohortsModel'
import { propertyDefinitionsModel } from '~/models/propertyDefinitionsModel'
import { NodeKind } from '~/queries/schema'
-import { isInsightVizNode, isLifecycleQuery } from '~/queries/utils'
+import { isInsightVizNode, isLifecycleQuery, isStickinessQuery, isTrendsQuery } from '~/queries/utils'
import { ChartDisplayType, ChartParams, GraphType } from '~/types'
import { InsightEmptyState } from '../../insights/EmptyStates'
@@ -43,6 +43,7 @@ export function ActionsLineGraph({
trendsFilter,
isLifecycle,
isStickiness,
+ isTrends,
} = useValues(trendsDataLogic(insightProps))
const labels =
@@ -52,6 +53,27 @@ export function ActionsLineGraph({
(indexedResults[0] && indexedResults[0].labels) ||
[]
+ const isLifecycleQueryWithFeatureFlagOn =
+ featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE] &&
+ isLifecycle &&
+ query &&
+ isInsightVizNode(query) &&
+ isLifecycleQuery(query.source)
+
+ const isStickinessQueryWithFeatureFlagOn =
+ featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_STICKINESS] &&
+ isStickiness &&
+ query &&
+ isInsightVizNode(query) &&
+ isStickinessQuery(query.source)
+
+ const isTrendsQueryWithFeatureFlagOn =
+ featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_TRENDS] &&
+ isTrends &&
+ query &&
+ isInsightVizNode(query) &&
+ isTrendsQuery(query.source)
+
return indexedResults &&
indexedResults[0]?.data &&
indexedResults.filter((result) => result.count !== 0).length > 0 ? (
@@ -114,11 +136,9 @@ export function ActionsLineGraph({
)
if (
- featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE] &&
- isLifecycle &&
- query &&
- isInsightVizNode(query) &&
- isLifecycleQuery(query.source)
+ isLifecycleQueryWithFeatureFlagOn ||
+ isStickinessQueryWithFeatureFlagOn ||
+ isTrendsQueryWithFeatureFlagOn
) {
openPersonsModal({
title,
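The three hoisted predicates share one shape: feature flag on, insight of the right kind, and a query source that passes the matching type guard. If more insight types join this list, a small helper could express the shape once; a hypothetical sketch, not part of this diff:

```ts
// flagOn is unknown because feature flag values may be boolean or string variants
const hogqlModalEnabled = (flagOn: unknown, isKind: boolean, sourceMatches: boolean): boolean =>
    Boolean(flagOn) && isKind && sourceMatches

// e.g. the lifecycle predicate above becomes:
// hogqlModalEnabled(
//     featureFlags[FEATURE_FLAGS.HOGQL_INSIGHTS_LIFECYCLE],
//     isLifecycle,
//     !!query && isInsightVizNode(query) && isLifecycleQuery(query.source)
// )
```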
diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts
index a486716da42a6..ee99664a7e55a 100644
--- a/frontend/src/scenes/urls.ts
+++ b/frontend/src/scenes/urls.ts
@@ -10,8 +10,9 @@ import {
DashboardType,
FilterType,
InsightShortId,
- PipelineAppTabs,
- PipelineTabs,
+ PipelineAppKind,
+ PipelineAppTab,
+ PipelineTab,
ReplayTabs,
} from '~/types'
@@ -101,10 +102,14 @@ export const urls = {
encode ? `/persons/${encodeURIComponent(uuid)}` : `/persons/${uuid}`,
persons: (): string => '/persons',
// TODO: Default to the landing page, once it's ready
- pipeline: (tab?: PipelineTabs): string => `/pipeline/${tab ? tab : PipelineTabs.Destinations}`,
- pipelineApp: (id: string | number, tab?: PipelineAppTabs): string =>
- `/pipeline/${id}/${tab ? tab : PipelineAppTabs.Configuration}`,
- pipelineNew: (tab?: PipelineTabs): string => `/pipeline/${tab ? tab : PipelineTabs.Destinations}/new`,
+ pipeline: (tab?: PipelineTab | ':tab'): string => `/pipeline/${tab ? tab : PipelineTab.Destinations}`,
+ /** @param id 'new' for new, uuid for batch exports and numbers for plugins */
+ pipelineApp: (
+ kind: PipelineAppKind | ':kindTab',
+ id: string | number,
+ appTab?: PipelineAppTab | ':appTab'
+ ): string =>
+ `/pipeline/${!kind.startsWith(':') ? `${kind}s` : kind}/${id}/${appTab ?? PipelineAppTab.Configuration}`,
groups: (groupTypeIndex: string | number): string => `/groups/${groupTypeIndex}`,
// :TRICKY: Note that groupKey is provided by user. We need to override urlPatternOptions for kea-router.
group: (groupTypeIndex: string | number, groupKey: string, encode: boolean = true, tab?: string | null): string =>
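Concretely, the reworked helper behaves like this (outputs derived from the implementation above):

```ts
urls.pipelineApp(PipelineAppKind.Destination, 42)
// => '/pipeline/destinations/42/configuration' (kind pluralized with 's'; tab defaults to Configuration)

urls.pipelineApp(PipelineAppKind.Transformation, 'new', PipelineAppTab.Logs)
// => '/pipeline/transformations/new/logs'

urls.pipelineApp(':kindTab', ':id', ':appTab')
// => '/pipeline/:kindTab/:id/:appTab' (placeholders starting with ':' skip pluralization, for route patterns)
```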
diff --git a/frontend/src/stories/How to add tabs to a scene.stories.mdx b/frontend/src/stories/How to add tabs to a scene.stories.mdx
index 328bd664e7cb2..f5c0fe2a40cda 100644
--- a/frontend/src/stories/How to add tabs to a scene.stories.mdx
+++ b/frontend/src/stories/How to add tabs to a scene.stories.mdx
@@ -113,7 +113,7 @@ export function Dashboards(): JSX.Element {
return (
-
+
router.actions.push(urls.dashboards(tab as DashboardsTabs))}
diff --git a/frontend/src/types.ts b/frontend/src/types.ts
index e150f41cd5942..2062f3278a4b0 100644
--- a/frontend/src/types.ts
+++ b/frontend/src/types.ts
@@ -27,6 +27,7 @@ import { LogLevel } from 'rrweb'
import { BehavioralFilterKey, BehavioralFilterType } from 'scenes/cohorts/CohortFilters/types'
import { AggregationAxisFormat } from 'scenes/insights/aggregationAxisFormat'
import { JSONContent } from 'scenes/notebooks/Notebook/utils'
+import { PipelineAppLogLevel } from 'scenes/pipeline/pipelineAppLogsLogic'
import { Scene } from 'scenes/sceneTypes'
import { QueryContext } from '~/queries/types'
@@ -523,14 +524,20 @@ export enum ExperimentsTabs {
Archived = 'archived',
}
-export enum PipelineTabs {
+export enum PipelineTab {
Filters = 'filters',
Transformations = 'transformations',
Destinations = 'destinations',
AppsManagement = 'apps-management',
}
-export enum PipelineAppTabs {
+export enum PipelineAppKind {
+ Filter = 'filter',
+ Transformation = 'transformation',
+ Destination = 'destination',
+}
+
+export enum PipelineAppTab {
Configuration = 'configuration',
Logs = 'logs',
Metrics = 'metrics',
@@ -1518,7 +1525,7 @@ export interface PluginType {
url?: string
tag?: string
icon?: string
- latest_tag?: string
+ latest_tag?: string // apps management page: The latest git hash for the repo behind the url
config_schema: Record<string, PluginConfigSchema> | PluginConfigSchema[]
source?: string
maintainer?: string
@@ -1596,6 +1603,7 @@ export interface PluginConfigTypeNew {
description?: string
updated_at: string
delivery_rate_24h?: number | null
+ config: Record<string, any>
}
// TODO: Rename to PluginConfigWithPluginInfo once the old plugin types are removed from the frontend
@@ -1631,20 +1639,12 @@ export interface PluginLogEntry {
instance_id: string
}
-export enum BatchExportLogEntryLevel {
- Debug = 'DEBUG',
- Log = 'LOG',
- Info = 'INFO',
- Warning = 'WARNING',
- Error = 'ERROR',
-}
-
export interface BatchExportLogEntry {
team_id: number
batch_export_id: number
run_id: number
timestamp: string
- level: BatchExportLogEntryLevel
+ level: PipelineAppLogLevel
message: string
}
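With `BatchExportLogEntryLevel` folded into `PipelineAppLogLevel`, plugin and batch export log entries can share level-based UI code. A small sketch of the kind of reuse this enables; the member names here are assumed to mirror the deleted enum's, and the mapping is illustrative, not from the codebase:

```ts
import { PipelineAppLogLevel } from 'scenes/pipeline/pipelineAppLogsLogic'

// Hypothetical shared helper: one level-to-style map for both log entry types
const levelToClassName: Partial<Record<PipelineAppLogLevel, string>> = {
    [PipelineAppLogLevel.Warning]: 'text-warning',
    [PipelineAppLogLevel.Error]: 'text-danger',
}
```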
diff --git a/latest_migrations.manifest b/latest_migrations.manifest
index 56feba7f32e78..ec3953b0693e2 100644
--- a/latest_migrations.manifest
+++ b/latest_migrations.manifest
@@ -5,7 +5,7 @@ contenttypes: 0002_remove_content_type_name
ee: 0015_add_verified_properties
otp_static: 0002_throttling
otp_totp: 0002_auto_20190420_0723
-posthog: 0383_externaldatasource_cascade
+posthog: 0384_activity_log_was_impersonated
sessions: 0001_initial
social_django: 0010_uid_db_index
two_factor: 0007_auto_20201201_1019
diff --git a/mypy-baseline.txt b/mypy-baseline.txt
index 37a339773ade4..ad7a29cee1b07 100644
--- a/mypy-baseline.txt
+++ b/mypy-baseline.txt
@@ -81,9 +81,7 @@ posthog/hogql/database/schema/session_replay_events.py:0: note: Consider using "
posthog/hogql/database/schema/event_sessions.py:0: error: Statement is unreachable [unreachable]
posthog/plugins/utils.py:0: error: Subclass of "str" and "bytes" cannot exist: would have incompatible method signatures [unreachable]
posthog/plugins/utils.py:0: error: Statement is unreachable [unreachable]
-posthog/plugins/utils.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/models/filters/base_filter.py:0: error: "HogQLContext" has no attribute "person_on_events_mode" [attr-defined]
-posthog/models/filters/base_filter.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
posthog/hogql/database/database.py:0: error: "FieldOrTable" has no attribute "fields" [attr-defined]
@@ -97,15 +95,18 @@ posthog/hogql/database/database.py:0: error: "Table" has no attribute "hogql_def
posthog/hogql/database/database.py:0: error: Incompatible types (expression has type "Literal['view', 'lazy_table']", TypedDict item "type" has type "Literal['integer', 'float', 'string', 'datetime', 'date', 'boolean', 'array', 'json', 'lazy_table', 'virtual_table', 'field_traverser']") [typeddict-item]
posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Argument 1 to "create_hogql_database" has incompatible type "int | None"; expected "int" [arg-type]
posthog/warehouse/models/datawarehouse_saved_query.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment]
-posthog/models/user.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/models/person/person.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "None", base class "AbstractUser" defined the type as "CharField[str | int | Combinable, str]") [assignment]
+posthog/models/user.py:0: error: Incompatible types in assignment (expression has type "posthog.models.user.UserManager", base class "AbstractUser" defined the type as "django.contrib.auth.models.UserManager[AbstractUser]") [assignment]
+posthog/models/user.py:0: error: Cannot override writeable attribute with read-only property [override]
+posthog/models/user.py:0: error: Signature of "is_superuser" incompatible with supertype "PermissionsMixin" [override]
+posthog/models/user.py:0: note: Superclass:
+posthog/models/user.py:0: note: BooleanField[bool | Combinable, bool]
+posthog/models/user.py:0: note: Subclass:
+posthog/models/user.py:0: note: bool
+posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined]
+posthog/models/user.py:0: error: "User" has no attribute "social_auth" [attr-defined]
+posthog/models/person/person.py:0: error: Incompatible types in assignment (expression has type "list[Never]", variable has type "ValuesQuerySet[PersonDistinctId, str]") [assignment]
posthog/models/feature_flag/flag_matching.py:0: error: Statement is unreachable [unreachable]
-posthog/queries/query_date_range.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/queries/query_date_range.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/queries/query_date_range.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/queries/query_date_range.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/queries/query_date_range.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/queries/actor_base_query.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/hogql_queries/utils/query_date_range.py:0: error: Incompatible return value type (got "str", expected "Literal['hour', 'day', 'week', 'month']") [return-value]
posthog/hogql_queries/utils/query_date_range.py:0: error: Item "None" of "dict[str, int] | None" has no attribute "get" [union-attr]
posthog/hogql_queries/utils/query_date_range.py:0: error: Statement is unreachable [unreachable]
@@ -166,8 +167,8 @@ posthog/hogql/modifiers.py:0: error: Incompatible types in assignment (expressio
posthog/hogql/functions/cohort.py:0: error: Argument 1 to "escape_clickhouse_string" has incompatible type "str | None"; expected "float | int | str | list[Any] | tuple[Any, ...] | date | datetime | UUID | UUIDT" [arg-type]
posthog/hogql/functions/cohort.py:0: error: Argument 1 to "escape_clickhouse_string" has incompatible type "str | None"; expected "float | int | str | list[Any] | tuple[Any, ...] | date | datetime | UUID | UUIDT" [arg-type]
posthog/hogql/functions/cohort.py:0: error: Incompatible types in assignment (expression has type "ValuesQuerySet[Cohort, tuple[int, bool | None]]", variable has type "ValuesQuerySet[Cohort, tuple[int, bool | None, str | None]]") [assignment]
-posthog/api/utils.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/api/utils.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/utils.py:0: error: Incompatible types in assignment (expression has type "type[EventDefinition]", variable has type "type[EnterpriseEventDefinition]") [assignment]
+posthog/api/utils.py:0: error: Argument 1 to "UUID" has incompatible type "int | str"; expected "str | None" [arg-type]
ee/billing/quota_limiting.py:0: error: List comprehension has incompatible type List[int]; expected List[str] [misc]
ee/billing/quota_limiting.py:0: error: Unsupported target for indexed assignment ("object") [index]
ee/billing/quota_limiting.py:0: error: "object" has no attribute "get" [attr-defined]
@@ -364,7 +365,7 @@ posthog/hogql/query.py:0: error: Incompatible types in assignment (expression ha
posthog/hogql/query.py:0: error: Argument 1 to "get_default_limit_for_context" has incompatible type "LimitContext | None"; expected "LimitContext" [arg-type]
posthog/hogql/query.py:0: error: "SelectQuery" has no attribute "select_queries" [attr-defined]
posthog/hogql/query.py:0: error: Subclass of "SelectQuery" and "SelectUnionQuery" cannot exist: would have incompatible method signatures [unreachable]
-posthog/hogql_queries/query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/hogql_queries/query_runner.py:0: error: Incompatible types in assignment (expression has type "HogQLQuery | TrendsQuery | LifecycleQuery | InsightActorsQuery | EventsQuery | ActorsQuery | RetentionQuery | SessionsTimelineQuery | WebOverviewQuery | WebTopClicksQuery | WebStatsTableQuery | StickinessQuery | BaseModel | dict[str, Any]", variable has type "HogQLQuery | TrendsQuery | LifecycleQuery | InsightActorsQuery | EventsQuery | ActorsQuery | RetentionQuery | SessionsTimelineQuery | WebOverviewQuery | WebTopClicksQuery | WebStatsTableQuery | StickinessQuery") [assignment]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Argument "chain" to "Field" has incompatible type "list[str]"; expected "list[str | int]" [arg-type]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: note: "List" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
posthog/hogql_queries/insights/trends/breakdown_values.py:0: note: Consider using "Sequence" instead, which is covariant
@@ -373,7 +374,6 @@ posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Argument "br
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Incompatible types in assignment (expression has type "float | int", variable has type "int") [assignment]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr]
posthog/hogql_queries/insights/trends/breakdown_values.py:0: error: Value of type "list[Any] | None" is not indexable [index]
-posthog/queries/event_query/event_query.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/hogql_queries/sessions_timeline_query_runner.py:0: error: Statement is unreachable [unreachable]
posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_type" [union-attr]
posthog/hogql_queries/insights/trends/breakdown.py:0: error: Item "None" of "BreakdownFilter | None" has no attribute "breakdown_histogram_bin_count" [union-attr]
@@ -424,6 +424,7 @@ posthog/hogql_queries/hogql_query_runner.py:0: error: Incompatible types in assi
posthog/hogql_queries/hogql_query_runner.py:0: error: Incompatible return value type (got "SelectQuery | SelectUnionQuery", expected "SelectQuery") [return-value]
posthog/hogql_queries/events_query_runner.py:0: error: Statement is unreachable [unreachable]
posthog/hogql_queries/events_query_runner.py:0: error: Argument "order_by" to "SelectQuery" has incompatible type "list[Expr]"; expected "list[OrderExpr] | None" [arg-type]
+posthog/hogql/metadata.py:0: error: Argument "metadata_source" to "translate_hogql" has incompatible type "SelectQuery | SelectUnionQuery"; expected "SelectQuery | None" [arg-type]
posthog/hogql/metadata.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery | SelectUnionQuery") [assignment]
posthog/queries/breakdown_props.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type]
posthog/hogql_queries/insights/trends/query_builder.py:0: error: Incompatible types in assignment (expression has type "SelectUnionQuery", variable has type "SelectQuery") [assignment]
@@ -434,11 +435,11 @@ posthog/queries/funnels/base.py:0: error: "HogQLContext" has no attribute "perso
posthog/queries/funnels/base.py:0: error: Argument 1 to "translate_hogql" has incompatible type "str | int"; expected "str" [arg-type]
ee/clickhouse/queries/funnels/funnel_correlation.py:0: error: Statement is unreachable [unreachable]
posthog/caching/calculate_results.py:0: error: Argument 3 to "process_query" has incompatible type "bool"; expected "LimitContext | None" [arg-type]
-posthog/api/person.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/api/person.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/api/person.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/person.py:0: error: Argument 1 to "loads" has incompatible type "str | None"; expected "str | bytes | bytearray" [arg-type]
+posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type]
+posthog/api/person.py:0: error: Argument "user" to "log_activity" has incompatible type "User | AnonymousUser"; expected "User | None" [arg-type]
posthog/hogql_queries/web_analytics/web_analytics_query_runner.py:0: error: Argument 1 to "append" of "list" has incompatible type "EventPropertyFilter"; expected "Expr" [arg-type]
-posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Return type "list[SelectQuery]" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
+posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Return type "list[SelectQuery]" of "to_query" incompatible with return type "SelectQuery | SelectUnionQuery" in supertype "QueryRunner" [override]
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Incompatible return value type (got "list[SelectQuery | SelectUnionQuery]", expected "list[SelectQuery]") [return-value]
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Need type annotation for "timings" (hint: "timings: List[<type>] = ...") [var-annotated]
posthog/hogql_queries/insights/trends/trends_query_runner.py:0: error: Argument 1 to "extend" of "list" has incompatible type "list[QueryTiming] | None"; expected "Iterable[Any]" [arg-type]
@@ -464,7 +465,6 @@ posthog/hogql_queries/insights/retention_query_runner.py:0: note: Consider using
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Argument "group_by" to "SelectQuery" has incompatible type "list[Field] | None"; expected "list[Expr] | None" [arg-type]
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Item "None" of "JoinExpr | None" has no attribute "sample" [union-attr]
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Incompatible return value type (got "SelectQuery | SelectUnionQuery", expected "SelectQuery") [return-value]
-posthog/hogql_queries/insights/retention_query_runner.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Argument 2 to "parse_select" has incompatible type "dict[str, SelectQuery]"; expected "dict[str, Expr] | None" [arg-type]
posthog/hogql_queries/insights/retention_query_runner.py:0: note: "Dict" is invariant -- see https://mypy.readthedocs.io/en/stable/common_issues.html#variance
posthog/hogql_queries/insights/retention_query_runner.py:0: note: Consider using "Mapping" instead, which is covariant in the value type
@@ -473,7 +473,6 @@ posthog/hogql_queries/insights/retention_query_runner.py:0: error: Unsupported o
posthog/hogql_queries/insights/retention_query_runner.py:0: note: Right operand is of type "int | None"
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select" [union-attr]
posthog/hogql_queries/insights/retention_query_runner.py:0: error: Incompatible return value type (got "SelectQuery | SelectUnionQuery", expected "SelectQuery") [return-value]
-posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "Constant") [assignment]
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "Constant") [assignment]
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Argument "exprs" to "And" has incompatible type "list[CompareOperation]"; expected "list[Expr]" [arg-type]
@@ -482,17 +481,10 @@ posthog/hogql_queries/insights/lifecycle_query_runner.py:0: note: Consider using
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Argument 1 to "sorted" has incompatible type "list[Any] | None"; expected "Iterable[Any]" [arg-type]
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "SelectUnionQuery" of "SelectQuery | SelectUnionQuery" has no attribute "select_from" [union-attr]
posthog/hogql_queries/insights/lifecycle_query_runner.py:0: error: Item "None" of "JoinExpr | Any | None" has no attribute "sample" [union-attr]
-posthog/hogql_queries/web_analytics/web_overview.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
posthog/hogql_queries/web_analytics/web_overview.py:0: error: Value of type "list[Any] | None" is not indexable [index]
-posthog/hogql_queries/web_analytics/top_clicks.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
-posthog/hogql_queries/web_analytics/stats_table.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompatible types in assignment (expression has type "PathFilter", variable has type "RetentionFilter") [assignment]
posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompatible types in assignment (expression has type "StickinessFilter", variable has type "RetentionFilter") [assignment]
posthog/hogql_queries/legacy_compatibility/process_insight.py:0: error: Incompatible types in assignment (expression has type "Filter", variable has type "RetentionFilter") [assignment]
-posthog/hogql_queries/insights/insight_actors_query_runner.py:0: error: Return type "SelectQuery | SelectUnionQuery" of "to_query" incompatible with return type "SelectQuery" in supertype "QueryRunner" [override]
-posthog/hogql_queries/actors_query_runner.py:0: error: Item "int" of "str | int" has no attribute "lower" [union-attr]
-posthog/hogql_queries/actors_query_runner.py:0: error: Incompatible return value type (got "list[str | int]", expected "list[str]") [return-value]
-posthog/hogql_queries/actors_query_runner.py:0: error: Argument 1 to "source_id_column" of "ActorsQueryRunner" has incompatible type "SelectQuery | SelectUnionQuery"; expected "SelectQuery" [arg-type]
posthog/hogql_queries/actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Field", variable has type "Constant") [assignment]
posthog/hogql_queries/actors_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "Constant") [assignment]
posthog/hogql_queries/actors_query_runner.py:0: error: Statement is unreachable [unreachable]
@@ -505,15 +497,14 @@ posthog/api/insight.py:0: error: Argument 1 to "is_insight_with_hogql_support" h
posthog/api/insight.py:0: error: Argument 1 to "process_insight" has incompatible type "Insight | DashboardTile"; expected "Insight" [arg-type]
posthog/api/dashboards/dashboard.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
posthog/api/feature_flag.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
-posthog/api/feature_flag.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/feature_flag.py:0: error: Item "Sequence[Any]" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr]
+posthog/api/feature_flag.py:0: error: Item "None" of "Any | Sequence[Any] | None" has no attribute "filters" [union-attr]
posthog/api/survey.py:0: error: Incompatible types in assignment (expression has type "Any | Sequence[Any] | None", variable has type "Survey | None") [assignment]
-posthog/api/user.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/user.py:0: error: "User" has no attribute "social_auth" [attr-defined]
ee/api/role.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
ee/api/dashboard_collaborator.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
-ee/api/test/base.py:0: error: Unused "type: ignore" comment [unused-ignore]
-ee/api/test/base.py:0: error: Unused "type: ignore" comment [unused-ignore]
-ee/api/test/base.py:0: error: Unused "type: ignore" comment [unused-ignore]
-ee/api/test/base.py:0: error: Unused "type: ignore" comment [unused-ignore]
+ee/api/test/base.py:0: error: Incompatible types in assignment (expression has type "None", variable has type "License") [assignment]
+ee/api/test/base.py:0: error: "setUpTestData" undefined in superclass [misc]
posthog/temporal/batch_exports/squash_person_overrides.py:0: error: Return type "SquashPersonOverridesInputs" of "parse_inputs" incompatible with return type "BatchExportsInputsProtocol" in supertype "PostHogWorkflow" [override]
posthog/tasks/exports/test/test_image_exporter.py:0: error: Function is missing a type annotation [no-untyped-def]
posthog/tasks/exports/test/test_image_exporter.py:0: error: Function is missing a type annotation [no-untyped-def]
@@ -543,23 +534,10 @@ posthog/session_recordings/queries/session_recording_list_from_replay_summary.py
posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: note: If the method is meant to be abstract, use @abc.abstractmethod
posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: error: Incompatible types in assignment (expression has type "PersonOnEventsMode", variable has type "PersonsOnEventsMode | None") [assignment]
posthog/session_recordings/queries/session_recording_list_from_replay_summary.py:0: error: Incompatible types in assignment (expression has type "PersonOnEventsMode", variable has type "PersonsOnEventsMode | None") [assignment]
-posthog/migrations/0305_rework_person_overrides.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0176_update_person_props_function.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0175_should_update_person_props_function.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0173_should_update_person_props_function.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0148_merge_20210506_0823.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0129_merge_20210223_0757.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0117_merge_20210126_0917.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0041_merge_20200407_1805.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/hogql_queries/test/test_query_runner.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/hogql_queries/test/test_query_runner.py:0: error: Incompatible default for argument "query_class" (default has type "type[TestQuery]", argument has type "type[HogQLQuery] | type[TrendsQuery] | type[LifecycleQuery] | type[InsightActorsQuery] | type[EventsQuery] | type[ActorsQuery] | type[RetentionQuery] | type[SessionsTimelineQuery] | type[WebOverviewQuery] | type[WebTopClicksQuery] | type[WebStatsTableQuery] | type[StickinessQuery]") [assignment]
+posthog/hogql_queries/test/test_query_runner.py:0: error: Variable "TestQueryRunner" is not valid as a type [valid-type]
+posthog/hogql_queries/test/test_query_runner.py:0: note: See https://mypy.readthedocs.io/en/stable/common_issues.html#variables-vs-type-aliases
+posthog/hogql_queries/test/test_query_runner.py:0: error: Invalid base class "TestQueryRunner" [misc]
posthog/hogql_queries/test/test_query_runner.py:0: error: Unsupported right operand type for in ("str | None") [operator]
posthog/hogql_queries/test/test_query_runner.py:0: error: Unsupported right operand type for in ("str | None") [operator]
posthog/hogql_queries/test/test_hogql_query_runner.py:0: error: Incompatible types in assignment (expression has type "Expr", variable has type "SelectQuery") [assignment]
@@ -764,8 +742,6 @@ posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type
posthog/hogql/database/schema/test/test_channel_type.py:0: error: Value of type "list[Any] | None" is not indexable [index]
posthog/api/organization_member.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
ee/api/feature_flag_role_access.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
-ee/api/integration.py:0: error: Unused "type: ignore" comment [unused-ignore]
-ee/api/integration.py:0: error: Unused "type: ignore" comment [unused-ignore]
ee/clickhouse/views/insights.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
posthog/queries/trends/test/test_person.py:0: error: "str" has no attribute "get" [attr-defined]
posthog/queries/trends/test/test_person.py:0: error: Invalid index type "int" for "HttpResponse"; expected type "str | bytes" [index]
@@ -856,8 +832,21 @@ posthog/hogql/database/test/test_view.py:0: error: Argument "dialect" to "print_
posthog/hogql/database/test/test_saved_query.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type]
posthog/hogql/database/test/test_s3_table.py:0: error: Argument "dialect" to "print_ast" has incompatible type "str"; expected "Literal['hogql', 'clickhouse']" [arg-type]
posthog/api/search.py:0: error: Argument "extra_fields" to "class_queryset" has incompatible type "object"; expected "dict[Any, Any] | None" [arg-type]
-posthog/api/property_definition.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/api/property_definition.py:0: error: Unused "type: ignore" comment [unused-ignore]
+posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr]
+posthog/api/property_definition.py:0: error: Item "Col" of "Col | Any" has no attribute "alias" [union-attr]
+posthog/api/property_definition.py:0: error: Item "ForeignObjectRel" of "Field[Any, Any] | ForeignObjectRel" has no attribute "cached_col" [union-attr]
+posthog/api/property_definition.py:0: error: Item "BasePagination" of "BasePagination | None" has no attribute "get_limit" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "BasePagination | None" has no attribute "get_limit" [union-attr]
+posthog/api/property_definition.py:0: error: Item "BasePagination" of "BasePagination | None" has no attribute "get_offset" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "BasePagination | None" has no attribute "get_offset" [union-attr]
+posthog/api/property_definition.py:0: error: Item "BasePagination" of "BasePagination | None" has no attribute "set_count" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "BasePagination | None" has no attribute "set_count" [union-attr]
+posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr]
+posthog/api/property_definition.py:0: error: Incompatible types in assignment (expression has type "type[EnterprisePropertyDefinitionSerializer]", variable has type "type[PropertyDefinitionSerializer]") [assignment]
+posthog/api/property_definition.py:0: error: Item "AnonymousUser" of "User | AnonymousUser" has no attribute "organization" [union-attr]
+posthog/api/property_definition.py:0: error: Item "None" of "Organization | Any | None" has no attribute "is_feature_available" [union-attr]
posthog/api/dashboards/dashboard_templates.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_calls" (hint: "_execute_calls: List[<type>] = ...") [var-annotated]
posthog/temporal/tests/batch_exports/test_snowflake_batch_export_workflow.py:0: error: Need type annotation for "_execute_async_calls" (hint: "_execute_async_calls: List[<type>] = ...") [var-annotated]
@@ -924,8 +913,6 @@ posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error:
posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py:0: error: "tuple[Any, ...]" has no attribute "upload_state" [attr-defined]
posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py:0: error: Unsupported left operand type for + ("None") [operator]
posthog/temporal/tests/batch_exports/test_postgres_batch_export_workflow.py:0: note: Left operand is of type "Any | None"
-posthog/temporal/tests/batch_exports/test_logger.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/temporal/tests/batch_exports/test_logger.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required]
posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required]
posthog/temporal/tests/batch_exports/test_batch_exports.py:0: error: TypedDict key must be a string literal; expected one of ("_timestamp", "created_at", "distinct_id", "elements", "elements_chain", ...) [literal-required]
@@ -942,7 +929,6 @@ posthog/temporal/tests/batch_exports/test_backfill_batch_export.py:0: error: Arg
posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type]
posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type]
posthog/queries/app_metrics/test/test_app_metrics.py:0: error: Argument 3 to "AppMetricsErrorDetailsQuery" has incompatible type "AppMetricsRequestSerializer"; expected "AppMetricsErrorsRequestSerializer" [arg-type]
-posthog/batch_exports/http.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/batch_exports/http.py:0: error: Unsupported right operand type for in ("object") [operator]
posthog/api/plugin.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
posthog/api/plugin.py:0: error: Metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases [misc]
@@ -954,8 +940,6 @@ posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: e
posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: note: Left operand is of type "Any | None"
posthog/temporal/tests/batch_exports/test_redshift_batch_export_workflow.py:0: error: Incompatible types in assignment (expression has type "str | int", variable has type "int") [assignment]
posthog/api/test/batch_exports/conftest.py:0: error: Argument "activities" to "ThreadedWorker" has incompatible type "list[function]"; expected "Sequence[Callable[..., Any]]" [arg-type]
-posthog/api/sharing.py:0: error: Unused "type: ignore" comment [unused-ignore]
-posthog/api/sharing.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible return value type (got "dict[str, Collection[str]]", expected "dict[str, str]") [return-value]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Incompatible types in assignment (expression has type "dict[str, Collection[str]]", variable has type "dict[str, str]") [assignment]
posthog/management/commands/test/test_create_batch_export_from_app.py:0: error: Unpacked dict entry 1 has incompatible type "str"; expected "SupportsKeysAndGetItem[str, str]" [dict-item]
@@ -999,7 +983,6 @@ posthog/api/test/batch_exports/test_update.py:0: error: Value of type "BatchExpo
posthog/api/test/batch_exports/test_pause.py:0: error: "batch_export_delete_schedule" does not return a value (it only ever returns None) [func-returns-value]
posthog/api/test/batch_exports/test_log_entry.py:0: error: Need type annotation for "results" (hint: "results: List[<type>] = ...") [var-annotated]
posthog/api/test/batch_exports/test_log_entry.py:0: error: Need type annotation for "results" (hint: "results: List[<type>] = ...") [var-annotated]
-posthog/api/test/test_capture.py:0: error: Unused "type: ignore" comment [unused-ignore]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
posthog/api/test/test_capture.py:0: error: Dict entry 0 has incompatible type "Any": "float"; expected "str": "int" [dict-item]
diff --git a/package.json b/package.json
index c70032225fcde..5fe2a984cc03d 100644
--- a/package.json
+++ b/package.json
@@ -141,7 +141,7 @@
"pmtiles": "^2.11.0",
"postcss": "^8.4.31",
"postcss-preset-env": "^9.3.0",
- "posthog-js": "1.97.1",
+ "posthog-js": "1.98.2",
"posthog-js-lite": "2.5.0",
"prettier": "^2.8.8",
"prop-types": "^15.7.2",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index ccbd8c966fb1e..172c7a07ec1c4 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -240,8 +240,8 @@ dependencies:
specifier: ^9.3.0
version: 9.3.0(postcss@8.4.31)
posthog-js:
- specifier: 1.97.1
- version: 1.97.1
+ specifier: 1.98.2
+ version: 1.98.2
posthog-js-lite:
specifier: 2.5.0
version: 2.5.0
@@ -17233,8 +17233,8 @@ packages:
resolution: {integrity: sha512-Urvlp0Vu9h3td0BVFWt0QXFJDoOZcaAD83XM9d91NKMKTVPZtfU0ysoxstIf5mw/ce9ZfuMgpWPaagrZI4rmSg==}
dev: false
- /posthog-js@1.97.1:
- resolution: {integrity: sha512-N5r7sm4EiSxBgFTZuReR7wvHgzK/UkPbIsy/nZuIexLD+oa94ZQFNY/i1o8utDNmXHFWVKv4rwFmzRmPZYc8Zw==}
+ /posthog-js@1.98.2:
+ resolution: {integrity: sha512-u0N98I81UV/lTQWBbjdqCcacbhPZHmApc8CNsvk1y9/iqHPShoKcbjRvAjtAw5ujD8kiX1GdrmxN3i6erxJBVg==}
dependencies:
fflate: 0.4.8
dev: false
diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py
index 63283a026942f..0953bb915a35c 100644
--- a/posthog/api/__init__.py
+++ b/posthog/api/__init__.py
@@ -77,16 +77,16 @@ def api_not_found(request):
"project_plugins_config_logs",
["team_id", "plugin_config_id"],
)
-pipeline_transformations_configs_router = projects_router.register(
- r"pipeline_transformations_configs",
+pipeline_transformation_configs_router = projects_router.register(
+ r"pipeline_transformation_configs",
plugin.PipelineTransformationsConfigsViewSet,
- "pipeline_transformations_configs",
+ "project_pipeline_transformation_configs",
["team_id"],
)
-pipeline_destinations_configs_router = projects_router.register(
- r"pipeline_destinations_configs",
+pipeline_destination_configs_router = projects_router.register(
+ r"pipeline_destination_configs",
plugin.PipelineDestinationsConfigsViewSet,
- "pipeline_destinations_configs",
+ "project_pipeline_destination_configs",
["team_id"],
)
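In this router rename, the third argument to `register` is the basename that DRF uses for `reverse()` lookups, so the change affects URL names as well as URL paths. A minimal sketch with plain DRF (the nested `projects_router` in the PR takes an extra parents argument not shown here):

```python
from rest_framework.routers import DefaultRouter
from rest_framework.viewsets import ViewSet


class PipelineTransformationConfigsViewSet(ViewSet):
    def list(self, request):
        ...


router = DefaultRouter()
router.register(
    r"pipeline_transformation_configs",
    PipelineTransformationConfigsViewSet,
    basename="project_pipeline_transformation_configs",
)
# reverse("project_pipeline_transformation_configs-list") now resolves;
# the old "pipeline_transformations_configs-list" name no longer exists.
```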
diff --git a/posthog/api/event_definition.py b/posthog/api/event_definition.py
index 6616c8beaddab..5ee9e6ddc6b64 100644
--- a/posthog/api/event_definition.py
+++ b/posthog/api/event_definition.py
@@ -28,6 +28,7 @@
TeamMemberAccessPermission,
)
from posthog.settings import EE_AVAILABLE
+from loginas.utils import is_impersonated_session
# If EE is enabled, we use ee.api.ee_event_definition.EnterpriseEventDefinitionSerializer
@@ -187,6 +188,7 @@ def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> respon
organization_id=cast(UUIDT, self.organization_id),
team_id=self.team_id,
user=user,
+ was_impersonated=is_impersonated_session(request),
item_id=instance_id,
scope="EventDefinition",
activity="deleted",
diff --git a/posthog/api/exports.py b/posthog/api/exports.py
index e4f5dd8104f8c..f5fd304c75d85 100644
--- a/posthog/api/exports.py
+++ b/posthog/api/exports.py
@@ -24,6 +24,7 @@
TeamMemberAccessPermission,
)
from posthog.tasks import exporter
+from loginas.utils import is_impersonated_session
logger = structlog.get_logger(__name__)
@@ -99,6 +100,9 @@ def _create_asset(
organization_id=insight.team.organization.id,
team_id=self.context["team_id"],
user=user,
+ was_impersonated=is_impersonated_session(self.context["request"])
+ if "request" in self.context
+ else False,
item_id=insight_id, # Type: ignore
scope="Insight",
activity="exported" if reason is None else f"exported for {reason}",
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index dcc040606cadc..abbd123ff929f 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -55,6 +55,7 @@
TeamMemberAccessPermission,
)
from posthog.rate_limit import BurstRateThrottle
+from loginas.utils import is_impersonated_session
DATABASE_FOR_LOCAL_EVALUATION = (
"default"
@@ -152,7 +153,7 @@ def get_features(self, feature_flag: FeatureFlag) -> Dict:
def get_surveys(self, feature_flag: FeatureFlag) -> Dict:
from posthog.api.survey import SurveyAPISerializer
- return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data # type: ignore
+ return SurveyAPISerializer(feature_flag.surveys_linked_flag, many=True).data
# ignoring type because mypy doesn't know about the surveys_linked_flag `related_name` relationship
def get_rollout_percentage(self, feature_flag: FeatureFlag) -> Optional[int]:
@@ -180,7 +181,7 @@ def validate_filters(self, filters):
# If we see this, just return the current filters
if "groups" not in filters and self.context["request"].method == "PATCH":
# mypy cannot tell that self.instance is a FeatureFlag
- return self.instance.filters # type: ignore
+ return self.instance.filters
aggregation_group_type_index = filters.get("aggregation_group_type_index", None)
@@ -683,6 +684,7 @@ def perform_create(self, serializer):
organization_id=self.organization.id,
team_id=self.team_id,
user=serializer.context["request"].user,
+ was_impersonated=is_impersonated_session(serializer.context["request"]),
item_id=serializer.instance.id,
scope="FeatureFlag",
activity="created",
@@ -705,6 +707,7 @@ def perform_update(self, serializer):
organization_id=self.organization.id,
team_id=self.team_id,
user=serializer.context["request"].user,
+ was_impersonated=is_impersonated_session(serializer.context["request"]),
item_id=instance_id,
scope="FeatureFlag",
activity="updated",
diff --git a/posthog/api/insight.py b/posthog/api/insight.py
index 2f88ee1c4929e..c42b3bc60fa28 100644
--- a/posthog/api/insight.py
+++ b/posthog/api/insight.py
@@ -104,6 +104,8 @@
relative_date_parse,
str_to_bool,
)
+from loginas.utils import is_impersonated_session
+
logger = structlog.get_logger(__name__)
@@ -115,6 +117,7 @@
def log_insight_activity(
+ *,
activity: str,
insight: Insight,
insight_id: int,
@@ -122,6 +125,7 @@ def log_insight_activity(
organization_id: UUIDT,
team_id: int,
user: User,
+ was_impersonated: bool,
changes: Optional[List[Change]] = None,
) -> None:
"""
@@ -136,6 +140,7 @@ def log_insight_activity(
organization_id=organization_id,
team_id=team_id,
user=user,
+ was_impersonated=was_impersonated,
item_id=insight_id,
scope="Insight",
activity=activity,
@@ -343,6 +348,7 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> Insight:
organization_id=self.context["request"].user.current_organization_id,
team_id=team_id,
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
)
return insight
@@ -409,6 +415,7 @@ def _log_insight_update(self, before_update, dashboards_before_change, updated_i
organization_id=self.context["request"].user.current_organization_id,
team_id=self.context["team_id"],
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
changes=changes,
)
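The bare `*` added to `log_insight_activity` makes every parameter keyword-only, so introducing the new required `was_impersonated` argument cannot silently shift existing positional call sites. A minimal illustration of the mechanism:

```python
def log_insight_activity(*, activity: str, insight_id: int, was_impersonated: bool) -> None:
    # All parameters after the bare "*" must be passed by name.
    print(activity, insight_id, was_impersonated)


log_insight_activity(activity="created", insight_id=1, was_impersonated=False)  # OK
# log_insight_activity("created", 1, False)  # TypeError: takes 0 positional arguments
```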
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index d0f63e08220fa..718a0e36eab27 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -40,6 +40,7 @@
)
from posthog.settings import DEBUG
from posthog.utils import relative_date_parse
+from loginas.utils import is_impersonated_session
logger = structlog.get_logger(__name__)
@@ -62,6 +63,7 @@ def log_notebook_activity(
organization_id: UUIDT,
team_id: int,
user: User,
+ was_impersonated: bool,
changes: Optional[List[Change]] = None,
) -> None:
short_id = str(notebook.short_id)
@@ -70,6 +72,7 @@ def log_notebook_activity(
organization_id=organization_id,
team_id=team_id,
user=user,
+ was_impersonated=was_impersonated,
item_id=notebook.short_id,
scope="Notebook",
activity=activity,
@@ -139,6 +142,7 @@ def create(self, validated_data: Dict, *args, **kwargs) -> Notebook:
organization_id=self.context["request"].user.current_organization_id,
team_id=team.id,
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(request),
)
return notebook
@@ -173,6 +177,7 @@ def update(self, instance: Notebook, validated_data: Dict, **kwargs) -> Notebook
organization_id=self.context["request"].user.current_organization_id,
team_id=self.context["team_id"],
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
changes=changes,
)
diff --git a/posthog/api/person.py b/posthog/api/person.py
index 390f4f82a4c98..91646e9d23920 100644
--- a/posthog/api/person.py
+++ b/posthog/api/person.py
@@ -95,6 +95,7 @@
)
from prometheus_client import Counter
from posthog.metrics import LABEL_TEAM_ID
+from loginas.utils import is_impersonated_session
DEFAULT_PAGE_LIMIT = 100
# Sync with .../lib/constants.tsx and .../ingestion/hooks.ts
@@ -396,7 +397,7 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response.
),
],
)
- def destroy(self, request: request.Request, pk=None, **kwargs): # type: ignore
+ def destroy(self, request: request.Request, pk=None, **kwargs):
try:
person = self.get_object()
person_id = person.id
@@ -406,6 +407,7 @@ def destroy(self, request: request.Request, pk=None, **kwargs): # type: ignore
organization_id=self.organization.id,
team_id=self.team_id,
user=cast(User, request.user),
+ was_impersonated=is_impersonated_session(request),
item_id=person_id,
scope="Person",
activity="deleted",
@@ -441,7 +443,7 @@ def values(self, request: request.Request, **kwargs) -> response.Response:
# Try loading as json for dicts or arrays
flattened.append(
{
- "name": convert_property_value(json.loads(value)), # type: ignore
+ "name": convert_property_value(json.loads(value)),
"count": count,
}
)
@@ -481,7 +483,8 @@ def split(self, request: request.Request, pk=None, **kwargs) -> response.Respons
log_activity(
organization_id=self.organization.id,
team_id=self.team.id,
- user=request.user, # type: ignore
+ user=request.user,
+ was_impersonated=is_impersonated_session(request),
item_id=person.id,
scope="Person",
activity="split_person",
@@ -572,7 +575,8 @@ def delete_property(self, request: request.Request, pk=None, **kwargs) -> respon
log_activity(
organization_id=self.organization.id,
team_id=self.team.id,
- user=request.user, # type: ignore
+ user=request.user,
+ was_impersonated=is_impersonated_session(request),
item_id=person.id,
scope="Person",
activity="delete_property",
@@ -675,6 +679,7 @@ def _set_properties(self, properties, user):
organization_id=self.organization.id,
team_id=self.team.id,
user=user,
+ was_impersonated=is_impersonated_session(self.request),
item_id=instance.pk,
scope="Person",
activity="updated",
@@ -921,14 +926,14 @@ def prepare_actor_query_filter(filter: T) -> T:
"key": "name",
"value": search,
"type": "group",
- "group_type_index": filter.aggregation_group_type_index, # type: ignore
+ "group_type_index": filter.aggregation_group_type_index,
"operator": "icontains",
},
{
"key": "slug",
"value": search,
"type": "group",
- "group_type_index": filter.aggregation_group_type_index, # type: ignore
+ "group_type_index": filter.aggregation_group_type_index,
"operator": "icontains",
},
]
diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py
index 889d6d3e0d8d6..a64c233b2daa5 100644
--- a/posthog/api/plugin.py
+++ b/posthog/api/plugin.py
@@ -51,6 +51,7 @@
from posthog.redis import get_client
from posthog.utils import format_query_params_absolute_url
+
# Keep this in sync with: frontend/scenes/plugins/utils.ts
SECRET_FIELD_VALUE = "**************** POSTHOG SECRET FIELD ****************"
@@ -60,11 +61,11 @@ def _update_plugin_attachments(request: request.Request, plugin_config: PluginCo
for key, file in request.FILES.items():
match = re.match(r"^add_attachment\[([^]]+)\]$", key)
if match:
- _update_plugin_attachment(plugin_config, match.group(1), file, user)
+ _update_plugin_attachment(request, plugin_config, match.group(1), file, user)
for key, _file in request.POST.items():
match = re.match(r"^remove_attachment\[([^]]+)\]$", key)
if match:
- _update_plugin_attachment(plugin_config, match.group(1), None, user)
+ _update_plugin_attachment(request, plugin_config, match.group(1), None, user)
def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, Any], secret_fields=[]) -> List[Change]:
@@ -82,13 +83,16 @@ def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str,
return config_changes
-def log_enabled_change_activity(new_plugin_config: PluginConfig, old_enabled: bool, user: User, changes=[]):
+def log_enabled_change_activity(
+ new_plugin_config: PluginConfig, old_enabled: bool, user: User, was_impersonated: bool, changes=[]
+):
if old_enabled != new_plugin_config.enabled:
log_activity(
organization_id=new_plugin_config.team.organization.id,
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=new_plugin_config.team.id,
user=user,
+ was_impersonated=was_impersonated,
item_id=new_plugin_config.id,
scope="PluginConfig",
activity="enabled" if not old_enabled else "disabled",
@@ -102,6 +106,7 @@ def log_config_update_activity(
secret_fields: Set[str],
old_enabled: bool,
user: User,
+ was_impersonated: bool,
):
config_changes = get_plugin_config_changes(
old_config=old_config,
@@ -115,16 +120,21 @@ def log_config_update_activity(
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=new_plugin_config.team.id,
user=user,
+ was_impersonated=was_impersonated,
item_id=new_plugin_config.id,
scope="PluginConfig",
activity="config_updated",
detail=Detail(name=new_plugin_config.plugin.name, changes=config_changes),
)
- log_enabled_change_activity(new_plugin_config=new_plugin_config, old_enabled=old_enabled, user=user)
+ log_enabled_change_activity(
+ new_plugin_config=new_plugin_config, old_enabled=old_enabled, user=user, was_impersonated=was_impersonated
+ )
-def _update_plugin_attachment(plugin_config: PluginConfig, key: str, file: Optional[UploadedFile], user: User):
+def _update_plugin_attachment(
+ request: request.Request, plugin_config: PluginConfig, key: str, file: Optional[UploadedFile], user: User
+):
try:
plugin_attachment = PluginAttachment.objects.get(team=plugin_config.team, plugin_config=plugin_config, key=key)
if file:
@@ -170,6 +180,7 @@ def _update_plugin_attachment(plugin_config: PluginConfig, key: str, file: Optio
organization_id=plugin_config.team.organization.id,
team_id=plugin_config.team.id,
user=user,
+ was_impersonated=is_impersonated_session(request),
item_id=plugin_config.id,
scope="PluginConfig",
activity=activity,
@@ -487,6 +498,7 @@ def destroy(self, request: request.Request, *args, **kwargs) -> Response:
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=user.team.id if user.team else 0, # type: ignore
user=user, # type: ignore
+ was_impersonated=is_impersonated_session(self.request),
item_id=instance_id,
scope="Plugin",
activity="uninstalled",
@@ -505,6 +517,7 @@ def perform_create(self, serializer):
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=user.team.id if user.team else 0,
user=user,
+ was_impersonated=is_impersonated_session(self.request),
item_id=serializer.instance.id,
scope="Plugin",
activity="installed",
@@ -657,6 +670,7 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> PluginConfi
secret_fields=_get_secret_fields_for_plugin(plugin_config.plugin),
),
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
)
_update_plugin_attachments(self.context["request"], plugin_config)
@@ -694,6 +708,7 @@ def update( # type: ignore
old_enabled=old_enabled,
secret_fields=secret_fields,
user=self.context["request"].user,
+ was_impersonated=is_impersonated_session(self.context["request"]),
)
_update_plugin_attachments(self.context["request"], plugin_config)
@@ -754,6 +769,7 @@ def rearrange(self, request: request.Request, **kwargs):
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=self.team.id,
user=request.user, # type: ignore
+ was_impersonated=is_impersonated_session(self.request),
item_id=plugin_config.id,
scope="Plugin", # use the type plugin so we can also provide unified history
activity="order_changed",
@@ -820,6 +836,7 @@ def job(self, request: request.Request, **kwargs):
# Users in an org but not yet in a team can technically manage plugins via the API
team_id=self.team.pk,
user=request.user, # type: ignore
+ was_impersonated=is_impersonated_session(self.request),
item_id=plugin_config_id,
scope="PluginConfig", # use the type plugin so we can also provide unified history
activity="job_triggered",
diff --git a/posthog/api/property_definition.py b/posthog/api/property_definition.py
index 6da00daecf445..8cde3479894b3 100644
--- a/posthog/api/property_definition.py
+++ b/posthog/api/property_definition.py
@@ -31,6 +31,7 @@
OrganizationMemberPermissions,
TeamMemberAccessPermission,
)
+from loginas.utils import is_impersonated_session
class SeenTogetherQuerySerializer(serializers.Serializer):
@@ -468,13 +469,13 @@ def get_queryset(self):
property_definition_fields = ", ".join(
[
- f'posthog_propertydefinition."{f.column}"' # type: ignore
+ f'posthog_propertydefinition."{f.column}"'
for f in PropertyDefinition._meta.get_fields()
if hasattr(f, "column")
]
)
- use_enterprise_taxonomy = self.request.user.organization.is_feature_available( # type: ignore
+ use_enterprise_taxonomy = self.request.user.organization.is_feature_available(
AvailableFeature.INGESTION_TAXONOMY
)
order_by_verified = False
@@ -485,9 +486,9 @@ def get_queryset(self):
# Prevent fetching deprecated `tags` field. Tags are separately fetched in TaggedItemSerializerMixin
property_definition_fields = ", ".join(
[
- f'{f.cached_col.alias}."{f.column}"' # type: ignore
+ f'{f.cached_col.alias}."{f.column}"'
for f in EnterprisePropertyDefinition._meta.get_fields()
- if hasattr(f, "column") and f.column not in ["deprecated_tags", "tags"] # type: ignore
+ if hasattr(f, "column") and f.column not in ["deprecated_tags", "tags"]
]
)
@@ -502,8 +503,8 @@ def get_queryset(self):
except ImportError:
use_enterprise_taxonomy = False
- limit = self.paginator.get_limit(self.request) # type: ignore
- offset = self.paginator.get_offset(self.request) # type: ignore
+ limit = self.paginator.get_limit(self.request)
+ offset = self.paginator.get_offset(self.request)
query = PropertyDefinitionQuerySerializer(data=self.request.query_params)
query.is_valid(raise_exception=True)
@@ -547,13 +548,13 @@ def get_queryset(self):
cursor.execute(query_context.as_count_sql(), query_context.params)
full_count = cursor.fetchone()[0]
- self.paginator.set_count(full_count) # type: ignore
+ self.paginator.set_count(full_count)
return queryset.raw(query_context.as_sql(order_by_verified), params=query_context.params)
def get_serializer_class(self) -> Type[serializers.ModelSerializer]:
serializer_class = self.serializer_class
- if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): # type: ignore
+ if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY):
try:
from ee.api.ee_property_definition import (
EnterprisePropertyDefinitionSerializer,
@@ -561,12 +562,12 @@ def get_serializer_class(self) -> Type[serializers.ModelSerializer]:
except ImportError:
pass
else:
- serializer_class = EnterprisePropertyDefinitionSerializer # type: ignore
+ serializer_class = EnterprisePropertyDefinitionSerializer
return serializer_class
def get_object(self):
id = self.kwargs["id"]
- if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): # type: ignore
+ if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY):
try:
from ee.models.property_definition import EnterprisePropertyDefinition
except ImportError:
@@ -625,6 +626,7 @@ def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> respon
organization_id=cast(UUIDT, self.organization_id),
team_id=self.team_id,
user=cast(User, request.user),
+ was_impersonated=is_impersonated_session(self.request),
item_id=instance_id,
scope="PropertyDefinition",
activity="deleted",
diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py
index babc3a3ecfad4..1259b4418d7de 100644
--- a/posthog/api/sharing.py
+++ b/posthog/api/sharing.py
@@ -33,6 +33,7 @@
from posthog.session_recordings.session_recording_api import SessionRecordingSerializer
from posthog.user_permissions import UserPermissions
from posthog.utils import render_template
+from loginas.utils import is_impersonated_session
def shared_url_as_png(url: str = "") -> str:
@@ -181,6 +182,7 @@ def patch(self, request: Request, *args: Any, **kwargs: Any) -> response.Respons
organization_id=None,
team_id=self.team_id,
user=cast(User, self.request.user),
+ was_impersonated=is_impersonated_session(self.request),
item_id=instance.insight.pk,
scope="Insight",
activity="sharing " + ("enabled" if serializer.data.get("enabled") else "disabled"),
@@ -213,8 +215,8 @@ class SharingViewerPageViewSet(mixins.RetrieveModelMixin, StructuredViewSetMixin
4. Export downloading - used to download the actual content of an export if requested with the correct extension
"""
- authentication_classes = [] # type: ignore
- permission_classes = [] # type: ignore
+ authentication_classes = []
+ permission_classes = []
include_in_docs = False
def get_object(self) -> Optional[SharingConfiguration | ExportedAsset]:
diff --git a/posthog/api/test/__snapshots__/test_insight.ambr b/posthog/api/test/__snapshots__/test_insight.ambr
index 802090b19aaba..0dbd61eb66823 100644
--- a/posthog/api/test/__snapshots__/test_insight.ambr
+++ b/posthog/api/test/__snapshots__/test_insight.ambr
@@ -1541,6 +1541,24 @@
LIMIT 21 /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/
'''
# ---
+# name: TestInsight.test_listing_insights_does_not_nplus1.30
+ '''
+ SELECT "posthog_taggeditem"."id",
+ "posthog_taggeditem"."tag_id",
+ "posthog_taggeditem"."dashboard_id",
+ "posthog_taggeditem"."insight_id",
+ "posthog_taggeditem"."event_definition_id",
+ "posthog_taggeditem"."property_definition_id",
+ "posthog_taggeditem"."action_id",
+ "posthog_taggeditem"."feature_flag_id"
+ FROM "posthog_taggeditem"
+ WHERE "posthog_taggeditem"."insight_id" IN (1,
+ 2,
+ 3,
+ 4,
+ 5 /* ... */) /*controller='project_insights-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/insights/%3F%24'*/
+ '''
+# ---
# name: TestInsight.test_listing_insights_does_not_nplus1.4
'''
SELECT "posthog_team"."id",
diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
index 5d23b9a237a64..b8250ce4d7cec 100644
--- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
+++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr
@@ -8294,6 +8294,17 @@
LIMIT 1 /*controller='project_dashboards-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%28%3FP%3Cpk%3E%5B%5E/.%5D%2B%29/%3F%24'*/
'''
# ---
+# name: TestDashboard.test_loading_individual_dashboard_does_not_prefetch_all_possible_tiles.315
+ '''
+ SELECT "posthog_instancesetting"."id",
+ "posthog_instancesetting"."key",
+ "posthog_instancesetting"."raw_value"
+ FROM "posthog_instancesetting"
+ WHERE "posthog_instancesetting"."key" = 'constance:posthog:PERSON_ON_EVENTS_ENABLED'
+ ORDER BY "posthog_instancesetting"."id" ASC
+ LIMIT 1 /*controller='project_dashboards-detail',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%28%3FP%3Cpk%3E%5B%5E/.%5D%2B%29/%3F%24'*/
+ '''
+# ---
# name: TestDashboard.test_loading_individual_dashboard_does_not_prefetch_all_possible_tiles.32
'''
SELECT "posthog_team"."id",
diff --git a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
index ff161a2839f81..550b197d0f5ca 100644
--- a/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
+++ b/posthog/api/test/notebooks/__snapshots__/test_notebook.ambr
@@ -290,6 +290,7 @@
"posthog_activitylog"."team_id",
"posthog_activitylog"."organization_id",
"posthog_activitylog"."user_id",
+ "posthog_activitylog"."was_impersonated",
"posthog_activitylog"."is_system",
"posthog_activitylog"."activity",
"posthog_activitylog"."item_id",
diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py
index 7d3f2a4aa9bf0..32ae14f01edc6 100644
--- a/posthog/api/test/test_app_metrics.py
+++ b/posthog/api/test/test_app_metrics.py
@@ -233,11 +233,12 @@ def test_error_details(self):
def _create_activity_log(self, **kwargs):
log_activity(
- **{
+ **{ # Using dict form so that kwargs can override these defaults
"organization_id": self.team.organization.id,
"team_id": self.team.pk,
"user": self.user,
"item_id": self.plugin_config.id,
+ "was_impersonated": False,
"scope": "PluginConfig",
**kwargs,
}
diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py
index 886097f71fae9..73654abf7a269 100644
--- a/posthog/api/test/test_capture.py
+++ b/posthog/api/test/test_capture.py
@@ -881,7 +881,7 @@ def test_batch(self, kafka_produce):
"distinct_id": "2",
"ip": "127.0.0.1",
"site_url": "http://testserver",
- "data": {**data, "properties": {}}, # type: ignore
+ "data": {**data, "properties": {}},
"token": self.team.api_token,
},
)
diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py
index 839131875a03d..a10e06de69e5a 100644
--- a/posthog/api/test/test_decide.py
+++ b/posthog/api/test/test_decide.py
@@ -2139,6 +2139,39 @@ def test_flag_with_regular_cohorts(self, *args):
self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False})
self.assertEqual(response.json()["errorsWhileComputingFlags"], False)
+ def test_flag_with_unknown_cohort(self, *args):
+ self.team.app_urls = ["https://example.com"]
+ self.team.save()
+ self.client.logout()
+
+ Person.objects.create(
+ team=self.team,
+ distinct_ids=["example_id_1"],
+ properties={"$some_prop_1": "something_1"},
+ )
+
+ FeatureFlag.objects.create(
+ team=self.team,
+ filters={"groups": [{"properties": [{"key": "id", "value": 99999, "type": "cohort"}]}]},
+ name="This is a cohort-based flag",
+ key="cohort-flag",
+ created_by=self.user,
+ )
+ FeatureFlag.objects.create(
+ team=self.team,
+ filters={"groups": [{"properties": [], "rollout_percentage": 100}]},
+ name="This is a regular flag",
+ key="simple-flag",
+ created_by=self.user,
+ )
+
+ with self.assertNumQueries(7):
+ # multiple queries to get the same cohort, because it doesn't exist
+ # TODO: Find a better way to optimise this in cache
+ response = self._post_decide(api_version=3, distinct_id="example_id_1")
+ self.assertEqual(response.json()["featureFlags"], {"cohort-flag": False, "simple-flag": True})
+ self.assertEqual(response.json()["errorsWhileComputingFlags"], False)
+
@snapshot_postgres_queries
def test_flag_with_behavioural_cohorts(self, *args):
self.team.app_urls = ["https://example.com"]
diff --git a/posthog/api/user.py b/posthog/api/user.py
index df5ea0c3d92ee..df3b3eff4a962 100644
--- a/posthog/api/user.py
+++ b/posthog/api/user.py
@@ -133,7 +133,7 @@ def get_is_impersonated(self, _) -> Optional[bool]:
return is_impersonated_session(self.context["request"])
def get_has_social_auth(self, instance: User) -> bool:
- return instance.social_auth.exists() # type: ignore
+ return instance.social_auth.exists()
def get_is_2fa_enabled(self, instance: User) -> bool:
return default_device(instance) is not None
@@ -167,7 +167,7 @@ def validate_notification_settings(self, notification_settings: Notifications) -
raise serializers.ValidationError(
f"{value} is not a valid type for notification settings, should be {Notifications.__annotations__[key]}"
)
- return {**NOTIFICATION_DEFAULTS, **notification_settings} # type: ignore
+ return {**NOTIFICATION_DEFAULTS, **notification_settings}
def validate_password_change(
self, instance: User, current_password: Optional[str], password: Optional[str]
diff --git a/posthog/api/utils.py b/posthog/api/utils.py
index 1953b86029c16..3c518a81b1f62 100644
--- a/posthog/api/utils.py
+++ b/posthog/api/utils.py
@@ -257,12 +257,12 @@ def create_event_definitions_sql(
ee_model = EnterpriseEventDefinition
else:
- ee_model = EventDefinition # type: ignore
+ ee_model = EventDefinition
event_definition_fields = {
- f'"{f.column}"' # type: ignore
+ f'"{f.column}"'
for f in ee_model._meta.get_fields()
- if hasattr(f, "column") and f.column not in ["deprecated_tags", "tags"] # type: ignore
+ if hasattr(f, "column") and f.column not in ["deprecated_tags", "tags"]
}
enterprise_join = (
@@ -296,7 +296,7 @@ def create_event_definitions_sql(
def get_pk_or_uuid(queryset: QuerySet, key: Union[int, str]) -> QuerySet:
try:
# Test if value is a UUID
- UUID(key) # type: ignore
+ UUID(key)
return queryset.filter(uuid=key)
except ValueError:
return queryset.filter(pk=key)
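`get_pk_or_uuid` keeps its EAFP shape with the ignore removed: parsing the key as a UUID either succeeds or raises `ValueError`, and that outcome selects the lookup field. A self-contained sketch of the same dispatch:

```python
from uuid import UUID


def lookup_field(key) -> str:
    try:
        UUID(str(key))  # str() keeps the failure mode a ValueError for ints too
        return "uuid"
    except ValueError:
        return "pk"


assert lookup_field("12345678-1234-5678-1234-567812345678") == "uuid"
assert lookup_field(42) == "pk"
```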
diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py
index 38c9354df09e8..22b3cc0cd9ec9 100644
--- a/posthog/batch_exports/http.py
+++ b/posthog/batch_exports/http.py
@@ -72,7 +72,7 @@ def validate_date_input(date_input: Any) -> dt.datetime:
# As far as I'm concerned, if you give me something that quacks like an isoformatted str, you are golden.
# Read more here: https://github.com/python/mypy/issues/2420.
# Once PostHog is 3.11, try/except is zero cost if nothing is raised: https://bugs.python.org/issue40222.
- parsed = dt.datetime.fromisoformat(date_input.replace("Z", "+00:00")) # type: ignore
+ parsed = dt.datetime.fromisoformat(date_input.replace("Z", "+00:00"))
except (TypeError, ValueError):
raise ValidationError(f"Input {date_input} is not a valid ISO formatted datetime.")
return parsed
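Context for the `replace("Z", "+00:00")` above: `datetime.fromisoformat` only accepts a trailing `Z` from Python 3.11 onwards, so the substitution keeps parsing working on older interpreters. A minimal demonstration:

```python
import datetime as dt


def parse_iso(date_input: str) -> dt.datetime:
    # On Python < 3.11, fromisoformat("...Z") raises ValueError without this.
    return dt.datetime.fromisoformat(date_input.replace("Z", "+00:00"))


assert parse_iso("2023-12-01T00:00:00Z").tzinfo == dt.timezone.utc
```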
diff --git a/posthog/hogql_queries/actors_query_runner.py b/posthog/hogql_queries/actors_query_runner.py
index b0fd3b96c1ae3..edfe06248595a 100644
--- a/posthog/hogql_queries/actors_query_runner.py
+++ b/posthog/hogql_queries/actors_query_runner.py
@@ -81,11 +81,20 @@ def input_columns(self) -> List[str]:
return self.strategy.input_columns()
- def source_id_column(self, source_query: ast.SelectQuery) -> List[str]:
+ # TODO: Figure out a more sure way of getting the actor id than using the alias or chain name
+ def source_id_column(self, source_query: ast.SelectQuery | ast.SelectUnionQuery) -> List[str]:
# Figure out the id column of the source query, first column that has id in the name
- for column in source_query.select:
- if isinstance(column, ast.Field) and any("id" in part.lower() for part in column.chain):
- return column.chain
+ if isinstance(source_query, ast.SelectQuery):
+ select = source_query.select
+ else:
+ select = source_query.select_queries[0].select
+
+ for column in select:
+ if isinstance(column, ast.Alias) and "id" in column.alias:
+ return [column.alias]
+
+ if isinstance(column, ast.Field) and any("id" in str(part).lower() for part in column.chain):
+ return [str(part) for part in column.chain]
raise ValueError("Source query must have an id column")
def source_table_join(self) -> ast.JoinExpr:
diff --git a/posthog/hogql_queries/insights/insight_actors_query_runner.py b/posthog/hogql_queries/insights/insight_actors_query_runner.py
index 4a5c437824d7e..1c17139ca91bd 100644
--- a/posthog/hogql_queries/insights/insight_actors_query_runner.py
+++ b/posthog/hogql_queries/insights/insight_actors_query_runner.py
@@ -5,6 +5,7 @@
from posthog.hogql.query import execute_hogql_query
from posthog.hogql_queries.insights.lifecycle_query_runner import LifecycleQueryRunner
from posthog.hogql_queries.insights.retention_query_runner import RetentionQueryRunner
+from posthog.hogql_queries.insights.stickiness_query_runner import StickinessQueryRunner
from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner
from posthog.hogql_queries.query_runner import QueryRunner, get_query_runner
from posthog.models.filters.mixins.utils import cached_property
@@ -24,13 +25,18 @@ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
lifecycle_runner = cast(LifecycleQueryRunner, self.source_runner)
day = self.query.day
status = self.query.status
- return lifecycle_runner.to_actors_query(day=day, status=status)
+ return lifecycle_runner.to_actors_query(day=str(day) if day else None, status=status)
elif isinstance(self.source_runner, TrendsQueryRunner):
trends_runner = cast(TrendsQueryRunner, self.source_runner)
return trends_runner.to_actors_query()
elif isinstance(self.source_runner, RetentionQueryRunner):
retention_runner = cast(RetentionQueryRunner, self.source_runner)
return retention_runner.to_actors_query(interval=self.query.interval)
+ elif isinstance(self.source_runner, StickinessQueryRunner):
+ stickiness_runner = cast(StickinessQueryRunner, self.source_runner)
+ return stickiness_runner.to_actors_query(
+ interval_num=int(self.query.day) if self.query.day is not None else None
+ )
raise ValueError(f"Cannot convert source query of type {self.query.source.kind} to persons query")
diff --git a/posthog/hogql_queries/insights/stickiness_query_runner.py b/posthog/hogql_queries/insights/stickiness_query_runner.py
index 76a2e11c38f68..80796d94649a9 100644
--- a/posthog/hogql_queries/insights/stickiness_query_runner.py
+++ b/posthog/hogql_queries/insights/stickiness_query_runner.py
@@ -17,6 +17,7 @@
from posthog.hogql.timings import HogQLTimings
from posthog.hogql_queries.query_runner import QueryRunner
from posthog.hogql_queries.utils.query_date_range import QueryDateRange
+from posthog.hogql_queries.utils.query_previous_period_date_range import QueryPreviousPeriodDateRange
from posthog.models import Team
from posthog.models.action.action import Action
from posthog.models.filters.mixins.utils import cached_property
@@ -29,9 +30,23 @@
)
+class SeriesWithExtras:
+ series: EventsNode | ActionsNode
+ is_previous_period_series: Optional[bool]
+
+ def __init__(
+ self,
+ series: EventsNode | ActionsNode,
+ is_previous_period_series: Optional[bool],
+ ):
+ self.series = series
+ self.is_previous_period_series = is_previous_period_series
+
+
class StickinessQueryRunner(QueryRunner):
query: StickinessQuery
query_type = StickinessQuery
+ series: List[SeriesWithExtras]
def __init__(
self,
@@ -42,6 +57,7 @@ def __init__(
limit_context: Optional[LimitContext] = None,
):
super().__init__(query, team=team, timings=timings, modifiers=modifiers, limit_context=limit_context)
+ self.series = self.setup_series()
def _is_stale(self, cached_result_package):
date_to = self.query_date_range.date_to()
@@ -65,15 +81,41 @@ def _refresh_frequency(self):
return refresh_frequency
- def to_query(self) -> List[ast.SelectQuery]: # type: ignore
- interval_subtract = ast.Call(
- name=f"toInterval{self.query_date_range.interval_name.capitalize()}",
- args=[ast.Constant(value=2)],
+ def _events_query(self, series_with_extra: SeriesWithExtras) -> ast.SelectQuery:
+ select_query = parse_select(
+ """
+ SELECT count(DISTINCT aggregation_target) as aggregation_target, num_intervals
+ FROM (
+ SELECT e.person_id as aggregation_target, count(DISTINCT toStartOfDay(e.timestamp)) as num_intervals
+ FROM events e
+ SAMPLE {sample}
+ WHERE {where_clause}
+ GROUP BY aggregation_target
+ )
+ WHERE num_intervals <= {num_intervals}
+ GROUP BY num_intervals
+ ORDER BY num_intervals
+ """,
+ placeholders={
+ "where_clause": self.where_clause(series_with_extra),
+ "num_intervals": ast.Constant(value=self.intervals_num()),
+ "sample": self._sample_value(),
+ },
)
+ return cast(ast.SelectQuery, select_query)
+
+ def to_query(self) -> List[ast.SelectQuery]: # type: ignore
queries = []
- for series in self.query.series:
+ for series in self.series:
+ date_range = self.date_range(series)
+
+ interval_subtract = ast.Call(
+ name=f"toInterval{date_range.interval_name.capitalize()}",
+ args=[ast.Constant(value=2)],
+ )
+
select_query = parse_select(
"""
SELECT groupArray(aggregation_target), groupArray(num_intervals)
@@ -83,28 +125,16 @@ def to_query(self) -> List[ast.SelectQuery]: # type: ignore
SELECT 0 as aggregation_target, (number + 1) as num_intervals
FROM numbers(dateDiff({interval}, {date_from} - {interval_subtract}, {date_to}))
UNION ALL
- SELECT count(DISTINCT aggregation_target) as aggregation_target, num_intervals
- FROM (
- SELECT e.person_id as aggregation_target, count(DISTINCT toStartOfDay(e.timestamp)) as num_intervals
- FROM events e
- SAMPLE {sample}
- WHERE {where_clause}
- GROUP BY aggregation_target
- )
- WHERE num_intervals <= {num_intervals}
- GROUP BY num_intervals
- ORDER BY num_intervals
+ {events_query}
)
GROUP BY num_intervals
ORDER BY num_intervals
)
""",
placeholders={
- **self.query_date_range.to_placeholders(),
- "where_clause": self.where_clause(series),
- "num_intervals": ast.Constant(value=self.intervals_num()),
+ **date_range.to_placeholders(),
"interval_subtract": interval_subtract,
- "sample": self._sample_value(),
+ "events_query": self._events_query(series),
},
)
@@ -112,8 +142,26 @@ def to_query(self) -> List[ast.SelectQuery]: # type: ignore
return queries
- def to_actors_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
- return ast.SelectUnionQuery(select_queries=[])
+ def to_actors_query(self, interval_num: Optional[int] = None) -> ast.SelectQuery | ast.SelectUnionQuery:
+ queries: List[ast.SelectQuery] = []
+
+ for series in self.series:
+ events_query = self._events_query(series)
+ events_query.select = [ast.Alias(alias="person_id", expr=ast.Field(chain=["aggregation_target"]))]
+ events_query.group_by = None
+ events_query.order_by = None
+
+ # Scope down to the individual day
+ if interval_num is not None:
+ events_query.where = ast.CompareOperation(
+ left=ast.Field(chain=["num_intervals"]),
+ op=ast.CompareOperationOp.Eq,
+ right=ast.Constant(value=interval_num),
+ )
+
+ queries.append(events_query)
+
+ return ast.SelectUnionQuery(select_queries=queries)
def calculate(self):
queries = self.to_query()
@@ -134,8 +182,10 @@ def calculate(self):
timings.extend(response.timings)
for val in response.results or []:
+ series_with_extra = self.series[index]
+
try:
- series_label = self.series_event(self.query.series[index])
+ series_label = self.series_event(series_with_extra.series)
except Action.DoesNotExist:
# Dont append the series if the action doesnt exist
continue
@@ -152,11 +202,20 @@ def calculate(self):
],
}
+ # Modifications for when comparing to previous period
+ if self.query.stickinessFilter is not None and self.query.stickinessFilter.compare:
+ series_object["compare"] = True
+ series_object["compare_label"] = (
+ "previous" if series_with_extra.is_previous_period_series else "current"
+ )
+
res.append(series_object)
return StickinessQueryResponse(results=res, timings=timings)
- def where_clause(self, series: EventsNode | ActionsNode) -> ast.Expr:
+ def where_clause(self, series_with_extra: SeriesWithExtras) -> ast.Expr:
+ date_range = self.date_range(series_with_extra)
+ series = series_with_extra.series
filters: List[ast.Expr] = []
# Dates
@@ -164,11 +223,11 @@ def where_clause(self, series: EventsNode | ActionsNode) -> ast.Expr:
[
parse_expr(
"timestamp >= {date_from_with_adjusted_start_of_interval}",
- placeholders=self.query_date_range.to_placeholders(),
+ placeholders=date_range.to_placeholders(),
),
parse_expr(
"timestamp <= {date_to}",
- placeholders=self.query_date_range.to_placeholders(),
+ placeholders=date_range.to_placeholders(),
),
]
)
@@ -233,6 +292,40 @@ def intervals_num(self):
delta = self.query_date_range.date_to() - self.query_date_range.date_from()
return delta.days + 2
+ def setup_series(self) -> List[SeriesWithExtras]:
+ series_with_extras = [
+ SeriesWithExtras(
+ series,
+ None,
+ )
+ for series in self.query.series
+ ]
+
+ if self.query.stickinessFilter is not None and self.query.stickinessFilter.compare:
+ updated_series = []
+ for series in series_with_extras:
+ updated_series.append(
+ SeriesWithExtras(
+ series=series.series,
+ is_previous_period_series=False,
+ )
+ )
+ updated_series.append(
+ SeriesWithExtras(
+ series=series.series,
+ is_previous_period_series=True,
+ )
+ )
+ series_with_extras = updated_series
+
+ return series_with_extras
+
+ def date_range(self, series: SeriesWithExtras):
+ if series.is_previous_period_series:
+ return self.query_previous_date_range
+
+ return self.query_date_range
+
@cached_property
def query_date_range(self):
return QueryDateRange(
@@ -241,3 +334,12 @@ def query_date_range(self):
interval=self.query.interval,
now=datetime.now(),
)
+
+ @cached_property
+ def query_previous_date_range(self):
+ return QueryPreviousPeriodDateRange(
+ date_range=self.query.dateRange,
+ team=self.team,
+ interval=self.query.interval,
+ now=datetime.now(),
+ )
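The heart of the compare support is `setup_series`: with `compare` enabled, every input series is fanned out into a current and a previous-period copy, and `date_range()` later selects the matching `QueryDateRange` for each copy. A simplified sketch of the fan-out, with strings standing in for `EventsNode | ActionsNode`:

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class SeriesWithExtras:
    series: str
    is_previous_period_series: Optional[bool]


def setup_series(series: List[str], compare: bool) -> List[SeriesWithExtras]:
    if not compare:
        return [SeriesWithExtras(s, None) for s in series]
    out: List[SeriesWithExtras] = []
    for s in series:
        out.append(SeriesWithExtras(s, False))  # current period
        out.append(SeriesWithExtras(s, True))   # previous period
    return out


assert len(setup_series(["$pageview"], compare=True)) == 2
```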
diff --git a/posthog/hogql_queries/query_runner.py b/posthog/hogql_queries/query_runner.py
index 8814d581890c1..fc41caa9fb9ce 100644
--- a/posthog/hogql_queries/query_runner.py
+++ b/posthog/hogql_queries/query_runner.py
@@ -102,7 +102,7 @@ def get_query_runner(
if isinstance(query, dict):
kind = query.get("kind", None)
elif hasattr(query, "kind"):
- kind = query.kind # type: ignore
+ kind = query.kind
else:
raise ValueError(f"Can't get a runner for an unknown query type: {query}")
@@ -232,7 +232,7 @@ def __init__(
self.limit_context = limit_context or LimitContext.QUERY
self.modifiers = create_default_modifiers_for_team(team, modifiers)
if isinstance(query, self.query_type):
- self.query = query # type: ignore
+ self.query = query
else:
self.query = self.query_type.model_validate(query)
@@ -272,7 +272,7 @@ def run(self, refresh_requested: Optional[bool] = None) -> CachedQueryResponse:
return fresh_response
@abstractmethod
- def to_query(self) -> ast.SelectQuery:
+ def to_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
raise NotImplementedError()
def to_actors_query(self) -> ast.SelectQuery | ast.SelectUnionQuery:
diff --git a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py
index 28a0d47036778..715bae457fe61 100644
--- a/posthog/hogql_queries/test/test_query_runner.py
+++ b/posthog/hogql_queries/test/test_query_runner.py
@@ -23,7 +23,7 @@ class TestQuery(BaseModel):
class TestQueryRunner(BaseTest):
- def setup_test_query_runner_class(self, query_class: Type[RunnableQueryNode] = TestQuery): # type: ignore
+ def setup_test_query_runner_class(self, query_class: Type[RunnableQueryNode] = TestQuery):
"""Setup required methods and attributes of the abstract base class."""
class TestQueryRunner(QueryRunner):
@@ -47,21 +47,21 @@ def _is_stale(self, cached_result_package) -> bool:
def test_init_with_query_instance(self):
TestQueryRunner = self.setup_test_query_runner_class()
- runner = TestQueryRunner(query=TestQuery(some_attr="bla"), team=self.team) # type: ignore
+ runner = TestQueryRunner(query=TestQuery(some_attr="bla"), team=self.team)
self.assertEqual(runner.query, TestQuery(some_attr="bla"))
def test_init_with_query_dict(self):
TestQueryRunner = self.setup_test_query_runner_class()
- runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team)
self.assertEqual(runner.query, TestQuery(some_attr="bla"))
def test_serializes_to_json(self):
TestQueryRunner = self.setup_test_query_runner_class()
- runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team)
json = runner.toJSON()
self.assertEqual(json, '{"some_attr":"bla"}')
@@ -80,7 +80,7 @@ def test_serializes_to_json_ignores_empty_dict(self):
# implement custom validators for this.
TestQueryRunner = self.setup_test_query_runner_class()
- runner = TestQueryRunner(query={"some_attr": "bla", "other_attr": []}, team=self.team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla", "other_attr": []}, team=self.team)
json = runner.toJSON()
self.assertEqual(json, '{"some_attr":"bla"}')
@@ -90,7 +90,7 @@ def test_cache_key(self):
# set the pk directly as it affects the hash in the _cache_key call
team = Team.objects.create(pk=42, organization=self.organization)
- runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla"}, team=team)
cache_key = runner._cache_key()
self.assertEqual(cache_key, "cache_b8a6b70478ec6139c8f7f379c808d5b9")
@@ -98,13 +98,13 @@ def test_cache_key(self):
def test_cache_key_runner_subclass(self):
TestQueryRunner = self.setup_test_query_runner_class()
- class TestSubclassQueryRunner(TestQueryRunner): # type: ignore
+ class TestSubclassQueryRunner(TestQueryRunner):
pass
# set the pk directly as it affects the hash in the _cache_key call
team = Team.objects.create(pk=42, organization=self.organization)
- runner = TestSubclassQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore
+ runner = TestSubclassQueryRunner(query={"some_attr": "bla"}, team=team)
cache_key = runner._cache_key()
self.assertEqual(cache_key, "cache_cfab9e42d088def74792922de5b513ac")
@@ -115,7 +115,7 @@ def test_cache_key_different_timezone(self):
team.timezone = "Europe/Vienna"
team.save()
- runner = TestQueryRunner(query={"some_attr": "bla"}, team=team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla"}, team=team)
cache_key = runner._cache_key()
self.assertEqual(cache_key, "cache_9f12fefe07c0ab79e93935aed6b0bfa6")
@@ -123,7 +123,7 @@ def test_cache_key_different_timezone(self):
def test_cache_response(self):
TestQueryRunner = self.setup_test_query_runner_class()
- runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team) # type: ignore
+ runner = TestQueryRunner(query={"some_attr": "bla"}, team=self.team)
with freeze_time(datetime(2023, 2, 4, 13, 37, 42)):
# returns fresh response if uncached
diff --git a/posthog/management/commands/test_migrations_are_safe.py b/posthog/management/commands/test_migrations_are_safe.py
index 0dbf2534c8702..566533fd9fe69 100644
--- a/posthog/management/commands/test_migrations_are_safe.py
+++ b/posthog/management/commands/test_migrations_are_safe.py
@@ -40,6 +40,7 @@ def validate_migration_sql(sql) -> bool:
if (
re.findall(r"(? None:
+ if was_impersonated and user is None:
+ logger.warn(
+ "activity_log.failed_to_write_to_activity_log",
+ team=team_id,
+ organization_id=organization_id,
+ scope=scope,
+ activity=activity,
+ exception=ValueError("Cannot log impersonated activity without a user"),
+ )
+ return
try:
if activity == "updated" and (detail.changes is None or len(detail.changes) == 0) and not force_save:
logger.warn(
@@ -344,6 +357,7 @@ def log_activity(
organization_id=organization_id,
team_id=team_id,
user=user,
+ was_impersonated=was_impersonated,
is_system=user is None,
item_id=str(item_id),
scope=scope,
@@ -359,6 +373,10 @@ def log_activity(
activity=activity,
exception=e,
)
+ if settings.TEST:
+ # Re-raise in tests, so that we can catch failures in test suites - but keep quiet in production,
+ # as we currently don't treat activity logs as critical
+ raise e
@dataclasses.dataclass(frozen=True)
diff --git a/posthog/models/comment.py b/posthog/models/comment.py
index fdd3f63237883..a1fff705c0632 100644
--- a/posthog/models/comment.py
+++ b/posthog/models/comment.py
@@ -55,6 +55,7 @@ def log_comment_activity(sender, instance: Comment, created: bool, **kwargs):
organization_id=None,
team_id=instance.team_id,
user=instance.created_by,
+ was_impersonated=None, # TODO - Find way to determine if the user was impersonated
item_id=item_id,
scope=scope,
activity="commented",
diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py
index d81f44bd61807..90b64805d9ac2 100644
--- a/posthog/models/feature_flag/flag_matching.py
+++ b/posthog/models/feature_flag/flag_matching.py
@@ -940,8 +940,12 @@ def get_all_properties_with_math_operators(
if prop.type == "cohort":
cohort_id = int(cast(Union[str, int], prop.value))
if cohorts_cache.get(cohort_id) is None:
- cohorts_cache[cohort_id] = Cohort.objects.using(DATABASE_FOR_FLAG_MATCHING).get(pk=cohort_id)
- cohort = cohorts_cache[cohort_id]
+ queried_cohort = Cohort.objects.using(DATABASE_FOR_FLAG_MATCHING).filter(pk=cohort_id).first()
+ if queried_cohort:
+ cohorts_cache[cohort_id] = queried_cohort
+ cohort = queried_cohort
+ else:
+ cohort = cohorts_cache[cohort_id]
if cohort:
all_keys_and_fields.extend(
get_all_properties_with_math_operators(cohort.properties.flat, cohorts_cache)
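Switching from `.get(pk=...)` to `.filter(pk=...).first()` makes a dangling cohort reference degrade gracefully: a missing row yields `None` instead of raising `Cohort.DoesNotExist`, so a flag pointing at a deleted cohort evaluates to "no match" rather than erroring (the same pattern is applied in `posthog/queries/base.py` below). A toy sketch of the miss-tolerant cache, with a dict standing in for the ORM:

```python
from typing import Dict, Optional

DB: Dict[int, str] = {1: "power users"}  # stand-in for the cohort table
cache: Dict[int, Optional[str]] = {}


def get_cohort(cohort_id: int) -> Optional[str]:
    if cache.get(cohort_id) is None:
        queried = DB.get(cohort_id)  # ~ Cohort.objects.filter(pk=...).first()
        if queried is not None:
            cache[cohort_id] = queried
        return queried
    return cache[cohort_id]


assert get_cohort(1) == "power users"
assert get_cohort(99999) is None  # unknown cohort: no exception raised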
diff --git a/posthog/models/filters/base_filter.py b/posthog/models/filters/base_filter.py
index 8b86de9b23129..10ca241ad8f16 100644
--- a/posthog/models/filters/base_filter.py
+++ b/posthog/models/filters/base_filter.py
@@ -62,7 +62,7 @@ def __init__(
self.hogql_context.person_on_events_mode = self.team.person_on_events_mode
if self.team and hasattr(self, "simplify") and not getattr(self, "is_simplified", False):
- simplified_filter = self.simplify(self.team) # type: ignore
+ simplified_filter = self.simplify(self.team)
self._data = simplified_filter._data
def to_dict(self) -> Dict[str, Any]:
diff --git a/posthog/models/filters/test/__snapshots__/test_filter.ambr b/posthog/models/filters/test/__snapshots__/test_filter.ambr
index 0d114111461b2..203c587fc0389 100644
--- a/posthog/models/filters/test/__snapshots__/test_filter.ambr
+++ b/posthog/models/filters/test/__snapshots__/test_filter.ambr
@@ -390,7 +390,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_cohorts.1
@@ -463,7 +464,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_cohorts_no_match.1
@@ -527,7 +529,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_negation_cohorts.1
@@ -551,7 +554,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_negation_cohorts.2
@@ -575,7 +579,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_negation_cohorts.3
@@ -599,7 +604,8 @@
"posthog_cohort"."groups"
FROM "posthog_cohort"
WHERE "posthog_cohort"."id" = 2
- LIMIT 21
+ ORDER BY "posthog_cohort"."id" ASC
+ LIMIT 1
'''
# ---
# name: TestDjangoPropertyGroupToQ.test_property_group_to_q_with_negation_cohorts.4
diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py
index 902742219ab0a..a04565423335b 100644
--- a/posthog/models/person/person.py
+++ b/posthog/models/person/person.py
@@ -31,7 +31,7 @@ class Person(models.Model):
@property
def distinct_ids(self) -> List[str]:
if hasattr(self, "distinct_ids_cache"):
- return [id.distinct_id for id in self.distinct_ids_cache] # type: ignore
+ return [id.distinct_id for id in self.distinct_ids_cache]
if hasattr(self, "_distinct_ids") and self._distinct_ids:
return self._distinct_ids
return [
@@ -307,5 +307,5 @@ def get_distinct_ids_for_subquery(person: Person | None, team: Team) -> List[str
)
distinct_ids = first_ids.union(last_ids)
else:
- distinct_ids = [] # type: ignore
+ distinct_ids = []
return list(map(str, distinct_ids))
diff --git a/posthog/models/user.py b/posthog/models/user.py
index 04fa79ef3da45..3b9fa7f4a5a85 100644
--- a/posthog/models/user.py
+++ b/posthog/models/user.py
@@ -159,12 +159,12 @@ class User(AbstractUser, UUIDClassicModel):
events_column_config: models.JSONField = models.JSONField(default=events_column_config_default)
# Remove unused attributes from `AbstractUser`
- username = None # type: ignore
+ username = None
- objects: UserManager = UserManager() # type: ignore
+ objects: UserManager = UserManager()
@property
- def is_superuser(self) -> bool: # type: ignore
+ def is_superuser(self) -> bool:
return self.is_staff
@cached_property
@@ -244,7 +244,7 @@ def join(
@property
def notification_settings(self) -> Notifications:
return {
- **NOTIFICATION_DEFAULTS, # type: ignore
+ **NOTIFICATION_DEFAULTS,
**(self.partial_notification_settings if self.partial_notification_settings else {}),
}
@@ -312,8 +312,8 @@ def get_analytics_metadata(self):
"project_setup_complete": project_setup_complete,
"joined_at": self.date_joined,
"has_password_set": self.has_usable_password(),
- "has_social_auth": self.social_auth.exists(), # type: ignore
- "social_providers": list(self.social_auth.values_list("provider", flat=True)), # type: ignore
+ "has_social_auth": self.social_auth.exists(),
+ "social_providers": list(self.social_auth.values_list("provider", flat=True)),
"instance_url": SITE_URL,
"instance_tag": INSTANCE_TAG,
"is_email_verified": self.is_email_verified,
diff --git a/posthog/plugins/utils.py b/posthog/plugins/utils.py
index 45eeb5ca94843..2610d8b2eb17d 100644
--- a/posthog/plugins/utils.py
+++ b/posthog/plugins/utils.py
@@ -276,7 +276,7 @@ def get_file_from_zip_archive(archive: bytes, filename: str, *, json_parse: bool
return json.loads(file_bytes)
if isinstance(file_bytes, bytes):
return file_bytes.decode("utf-8")
- return str(file_bytes) # type: ignore
+ return str(file_bytes)
def get_file_from_tgz_archive(archive: bytes, filename, *, json_parse: bool) -> Any:
diff --git a/posthog/queries/actor_base_query.py b/posthog/queries/actor_base_query.py
index 75ff6ab0a97e9..6757a14f1cd97 100644
--- a/posthog/queries/actor_base_query.py
+++ b/posthog/queries/actor_base_query.py
@@ -115,7 +115,7 @@ def get_actors(
if (
hasattr(self._filter, "include_recordings")
- and self._filter.include_recordings # type: ignore
+ and self._filter.include_recordings
and self._filter.insight in [INSIGHT_PATHS, INSIGHT_TRENDS, INSIGHT_FUNNELS]
):
serialized_actors = self.add_matched_recordings_to_serialized_actors(serialized_actors, raw_result)
diff --git a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
index 2069be3bc686c..4f90b5aa41d7e 100644
--- a/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
+++ b/posthog/queries/app_metrics/test/__snapshots__/test_historical_exports.ambr
@@ -52,6 +52,7 @@
"posthog_activitylog"."team_id",
"posthog_activitylog"."organization_id",
"posthog_activitylog"."user_id",
+ "posthog_activitylog"."was_impersonated",
"posthog_activitylog"."is_system",
"posthog_activitylog"."activity",
"posthog_activitylog"."item_id",
@@ -109,6 +110,7 @@
"posthog_activitylog"."team_id",
"posthog_activitylog"."organization_id",
"posthog_activitylog"."user_id",
+ "posthog_activitylog"."was_impersonated",
"posthog_activitylog"."is_system",
"posthog_activitylog"."activity",
"posthog_activitylog"."item_id",
@@ -164,6 +166,7 @@
"posthog_activitylog"."team_id",
"posthog_activitylog"."organization_id",
"posthog_activitylog"."user_id",
+ "posthog_activitylog"."was_impersonated",
"posthog_activitylog"."is_system",
"posthog_activitylog"."activity",
"posthog_activitylog"."item_id",
@@ -219,6 +222,7 @@
"posthog_activitylog"."team_id",
"posthog_activitylog"."organization_id",
"posthog_activitylog"."user_id",
+ "posthog_activitylog"."was_impersonated",
"posthog_activitylog"."is_system",
"posthog_activitylog"."activity",
"posthog_activitylog"."item_id",
diff --git a/posthog/queries/app_metrics/test/test_historical_exports.py b/posthog/queries/app_metrics/test/test_historical_exports.py
index 6bed36981931c..4962548950fbd 100644
--- a/posthog/queries/app_metrics/test/test_historical_exports.py
+++ b/posthog/queries/app_metrics/test/test_historical_exports.py
@@ -345,12 +345,13 @@ def test_historical_export_metrics(self):
def _create_activity_log(self, **kwargs):
log_activity(
- **{
+ **{ # Using dict form so that kwargs can override these defaults
"organization_id": self.team.organization.id,
"team_id": self.team.pk,
"user": self.user,
"item_id": self.plugin_config.pk,
"scope": "PluginConfig",
+ "was_impersonated": False,
**kwargs,
}
)
diff --git a/posthog/queries/base.py b/posthog/queries/base.py
index ac92db83ad00a..b0b559fc7fcaa 100644
--- a/posthog/queries/base.py
+++ b/posthog/queries/base.py
@@ -279,10 +279,19 @@ def property_to_Q(
cohort_id = int(cast(Union[str, int], value))
if cohorts_cache is not None:
if cohorts_cache.get(cohort_id) is None:
- cohorts_cache[cohort_id] = Cohort.objects.using(using_database).get(pk=cohort_id)
- cohort = cohorts_cache[cohort_id]
+ queried_cohort = Cohort.objects.using(using_database).filter(pk=cohort_id).first()
+ if queried_cohort:
+ cohorts_cache[cohort_id] = queried_cohort
+
+ cohort = queried_cohort
+ else:
+ cohort = cohorts_cache[cohort_id]
else:
- cohort = Cohort.objects.using(using_database).get(pk=cohort_id)
+ cohort = Cohort.objects.using(using_database).filter(pk=cohort_id).first()
+
+ if not cohort:
+ # Don't match anything if cohort doesn't exist
+ return Q(pk__isnull=True)
if cohort.is_static:
return Q(
diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py
index 544afee2a2a10..a8347054d4efc 100644
--- a/posthog/queries/event_query/event_query.py
+++ b/posthog/queries/event_query/event_query.py
@@ -88,7 +88,7 @@ def __init__(
# This issue manifests for us with formulas, where on queries A and B we join events against itself
# and both tables end up having $session_id. Without a formula this is not a problem.]
self._session_id_alias = (
- f"session_id_{self._entity.index}" # type: ignore
+ f"session_id_{self._entity.index}"
if hasattr(self, "_entity") and getattr(self._filter, "formula", None)
else None
)
diff --git a/posthog/queries/funnels/sql.py b/posthog/queries/funnels/sql.py
index e68db9797f41b..8d488f936631d 100644
--- a/posthog/queries/funnels/sql.py
+++ b/posthog/queries/funnels/sql.py
@@ -7,4 +7,5 @@
ORDER BY aggregation_target
{limit}
{offset}
+SETTINGS max_ast_elements=1000000, max_expanded_ast_elements=1000000
"""
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel.ambr
index 5fcc024cfd805..ecd37c1ea0e36 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel.ambr
@@ -172,7 +172,8 @@
WHERE steps IN [2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFOSSFunnel.test_funnel_events_with_person_on_events_v2
@@ -659,7 +660,8 @@
WHERE steps IN [1, 2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFOSSFunnel.test_funnel_with_property_groups.2
@@ -766,7 +768,8 @@
WHERE steps IN [2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFOSSFunnel.test_funnel_with_property_groups.3
@@ -873,7 +876,8 @@
WHERE steps IN [3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFOSSFunnel.test_funnel_with_static_cohort_step_filter
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel_persons.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel_persons.ambr
index a1a45650b528b..cf0bcb1dbfe6f 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel_persons.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel_persons.ambr
@@ -160,7 +160,8 @@
WHERE steps IN [1, 2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelPersons.test_funnel_person_recordings.1
@@ -335,7 +336,8 @@
WHERE steps IN [2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelPersons.test_funnel_person_recordings.3
@@ -510,7 +512,8 @@
WHERE steps = 2
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelPersons.test_funnel_person_recordings.5
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel_strict_persons.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
index fa267c176d958..daf11126b6251 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel_strict_persons.ambr
@@ -121,7 +121,8 @@
WHERE steps IN [1, 2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.1
@@ -257,7 +258,8 @@
WHERE steps IN [2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.3
@@ -393,7 +395,8 @@
WHERE steps = 2
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelStrictStepsPersons.test_strict_funnel_person_recordings.5
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel_trends.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel_trends.ambr
index 33f6e9439dafe..df09f5d725fca 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel_trends.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel_trends.ambr
@@ -329,6 +329,7 @@
WHERE steps_completed >= 3
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel_trends_persons.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel_trends_persons.ambr
index 5a61a961f8fb3..599aeb2728a98 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel_trends_persons.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel_trends_persons.ambr
@@ -138,7 +138,8 @@
WHERE steps_completed >= 2
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelTrendsPersons.test_funnel_trend_persons_returns_recordings.1
@@ -292,7 +293,8 @@
AND steps_completed < 3
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_drop_off.1
@@ -445,7 +447,8 @@
WHERE steps_completed >= 3
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelTrendsPersons.test_funnel_trend_persons_with_no_to_step.1
diff --git a/posthog/queries/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr b/posthog/queries/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
index aef0f3690f8bb..67832329d6684 100644
--- a/posthog/queries/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
+++ b/posthog/queries/funnels/test/__snapshots__/test_funnel_unordered_persons.ambr
@@ -228,7 +228,8 @@
WHERE steps IN [1, 2, 3]
ORDER BY aggregation_target
LIMIT 100
- OFFSET 0
+ OFFSET 0 SETTINGS max_ast_elements=1000000,
+ max_expanded_ast_elements=1000000
'''
# ---
# name: TestFunnelUnorderedStepsPersons.test_unordered_funnel_does_not_return_recordings.1
diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py
index 7ab077bac0463..954350b21b069 100644
--- a/posthog/queries/query_date_range.py
+++ b/posthog/queries/query_date_range.py
@@ -173,11 +173,11 @@ def delta(self) -> timedelta:
def num_intervals(self) -> int:
if not hasattr(self._filter, "interval"):
return 1
- if self._filter.interval == "month": # type: ignore
+ if self._filter.interval == "month":
rel_delta = relativedelta(self.date_to_param, self.date_from_param)
return (rel_delta.years * 12) + rel_delta.months + 1
- return int(self.delta.total_seconds() / TIME_IN_SECONDS[self._filter.interval]) + 1 # type: ignore
+ return int(self.delta.total_seconds() / TIME_IN_SECONDS[self._filter.interval]) + 1
@cached_property
def should_round(self) -> bool:
@@ -188,14 +188,14 @@ def should_round(self) -> bool:
return False
round_interval = False
- if self._filter.interval in ["week", "month"]: # type: ignore
+ if self._filter.interval in ["week", "month"]:
round_interval = True
else:
- round_interval = self.delta.total_seconds() >= TIME_IN_SECONDS[self._filter.interval] * 2 # type: ignore
+ round_interval = self.delta.total_seconds() >= TIME_IN_SECONDS[self._filter.interval] * 2
return round_interval
def is_hourly(self, target):
if not hasattr(self._filter, "interval"):
return False
- return self._filter.interval == "hour" or (target and isinstance(target, str) and "h" in target) # type: ignore
+ return self._filter.interval == "hour" or (target and isinstance(target, str) and "h" in target)
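
A worked example of the month branch above (dates assumed for illustration): relativedelta decomposes the gap into years and months, and the trailing `+ 1` makes the count inclusive of both endpoint intervals.

from datetime import date
from dateutil.relativedelta import relativedelta

rel_delta = relativedelta(date(2024, 3, 15), date(2023, 1, 1))
# 1 year and 2 whole months apart -> (1 * 12) + 2 + 1 = 15 monthly intervals
num_intervals = (rel_delta.years * 12) + rel_delta.months + 1
assert num_intervals == 15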
diff --git a/posthog/schema.py b/posthog/schema.py
index e800f33786b78..4461862237712 100644
--- a/posthog/schema.py
+++ b/posthog/schema.py
@@ -2060,7 +2060,7 @@ class InsightActorsQuery(BaseModel):
model_config = ConfigDict(
extra="forbid",
)
- day: Optional[str] = None
+ day: Optional[Union[str, int]] = None
interval: Optional[int] = Field(
default=None, description="An interval selected out of available intervals in source query"
)
diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py
index e1191c4ddb37e..5068fa8fa994a 100644
--- a/posthog/session_recordings/realtime_snapshots.py
+++ b/posthog/session_recordings/realtime_snapshots.py
@@ -14,7 +14,7 @@
PUBLISHED_REALTIME_SUBSCRIPTIONS_COUNTER = Counter(
"realtime_snapshots_published_subscription_counter",
"When the API is serving snapshot requests and wants to receive snapshots via a redis subscription.",
- labelnames=["team_id", "session_id", "attempt_count"],
+ labelnames=["attempt_count"],
)
REALTIME_SUBSCRIPTIONS_LOADED_COUNTER = Counter(
@@ -23,27 +23,49 @@
labelnames=["attempt_count"],
)
+
SUBSCRIPTION_CHANNEL = "@posthog/replay/realtime-subscriptions"
-ATTEMPT_MAX = 10
-ATTEMPT_TIMEOUT_SECONDS = 5
+
+ATTEMPT_MAX = 6
+ATTEMPT_TIMEOUT_SECONDS = 0.1
def get_key(team_id: str, suffix: str) -> str:
return f"@posthog/replay/snapshots/team-{team_id}/{suffix}"
-def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Optional[List[Dict]]:
+def publish_subscription(team_id: str, session_id: str) -> None:
+ """
+ Publishing a subscription notifies each instance of Mr Blobby of the request for realtime playback.
+ At most one instance will be handling the session; if one is, it will start publishing the snapshot
+ data to Redis so that it can be played back before the data has been sent to blob storage.
+ """
try:
redis = get_client(settings.SESSION_RECORDING_REDIS_URL)
- key = get_key(team_id, session_id)
- encoded_snapshots = redis.zrange(key, 0, -1, withscores=True)
-
- # We always publish as it could be that a rebalance has occured and the consumer doesn't know it should be
- # sending data to redis
redis.publish(
SUBSCRIPTION_CHANNEL,
json.dumps({"team_id": team_id, "session_id": session_id}),
)
+ except Exception as e:
+ capture_exception(
+ e,
+ extras={
+ "operation": "publish_realtime_subscription",
+ },
+ tags={"team_id": team_id, "session_id": session_id},
+ )
+ raise e
+
+
+def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Optional[List[Dict]]:
+ try:
+ redis = get_client(settings.SESSION_RECORDING_REDIS_URL)
+ key = get_key(team_id, session_id)
+ encoded_snapshots = redis.zrange(key, 0, -1, withscores=True)
+
+ # We always publish as it could be that a rebalance has occurred
+ # and the consumer doesn't know it should be sending data to redis
+ publish_subscription(team_id, session_id)
if not encoded_snapshots and attempt_count < ATTEMPT_MAX:
logger.info(
@@ -52,16 +74,12 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op
session_id=session_id,
attempt_count=attempt_count,
)
- # If we don't have it we could be in the process of getting it and syncing it
- redis.publish(
- SUBSCRIPTION_CHANNEL,
- json.dumps({"team_id": team_id, "session_id": session_id}),
- )
- PUBLISHED_REALTIME_SUBSCRIPTIONS_COUNTER.labels(
- team_id=team_id, session_id=session_id, attempt_count=attempt_count
- ).inc()
- sleep(ATTEMPT_TIMEOUT_SECONDS / ATTEMPT_MAX)
+ PUBLISHED_REALTIME_SUBSCRIPTIONS_COUNTER.labels(attempt_count=attempt_count).inc()
+
+ # this means we'll sleep 0.1, 0.1, 0.1, 0.1, 0.2, 0.2
+ # for a total of 0.8 seconds
+ sleep(ATTEMPT_TIMEOUT_SECONDS if attempt_count < 4 else ATTEMPT_TIMEOUT_SECONDS * 2)
return get_realtime_snapshots(team_id, session_id, attempt_count + 1)
if encoded_snapshots:
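
An equivalent iterative sketch of the retry schedule above (hypothetical `fetch`/`publish` callables, not the module's actual API): every attempt re-publishes the subscription in case a consumer rebalance dropped it, then backs off with four 0.1 s waits followed by two 0.2 s waits, roughly 0.8 s in the worst case.

from time import sleep

def poll_for_snapshots(fetch, publish, attempt_max=6, timeout=0.1):
    for attempt_count in range(attempt_max):
        publish()  # always re-publish: a rebalance may have dropped the subscription
        snapshots = fetch()
        if snapshots:
            return snapshots
        sleep(timeout if attempt_count < 4 else timeout * 2)
    return None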
diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py
index 915fbc83d68ba..31fe842a5ba2d 100644
--- a/posthog/session_recordings/session_recording_api.py
+++ b/posthog/session_recordings/session_recording_api.py
@@ -45,7 +45,7 @@
ClickHouseSustainedRateThrottle,
)
from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents
-from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots
+from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots, publish_subscription
from posthog.session_recordings.snapshots.convert_legacy_snapshots import (
convert_original_version_lts_recording,
)
@@ -379,6 +379,11 @@ def snapshots(self, request: request.Request, **kwargs):
"end_timestamp": None,
}
)
+ # the UI will use this to try to load realtime snapshots,
+ # so we can publish the request for Mr. Blobby to start syncing to Redis now.
+ # it takes a short while for the subscription to be synced into Redis,
+ # so let's use the network round-trip time to get a head start
+ publish_subscription(team_id=str(self.team.pk), session_id=str(recording.session_id))
response_data["sources"] = sources
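
A sketch of the warm-up pattern being applied here (hypothetical names; `build_sources` is an assumed helper): publishing while the sources response is still in flight means the client's follow-up request for realtime snapshots arrives after Mr. Blobby has had the network round trip's worth of time to start syncing.

def list_snapshot_sources(team_id: str, session_id: str) -> dict:
    sources = build_sources(session_id)  # assumed helper returning source descriptors
    # publish eagerly: the subscription syncs while the response travels to the client
    publish_subscription(team_id=team_id, session_id=session_id)
    return {"sources": sources}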
diff --git a/posthog/settings/web.py b/posthog/settings/web.py
index 36309548bb4a4..08c7f00769619 100644
--- a/posthog/settings/web.py
+++ b/posthog/settings/web.py
@@ -142,6 +142,7 @@
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
+ "loginas.context_processors.impersonated_session_status",
]
},
}
diff --git a/posthog/templates/head.html b/posthog/templates/head.html
index ed0d359faa014..f0ac06dea5a1a 100644
--- a/posthog/templates/head.html
+++ b/posthog/templates/head.html
@@ -22,7 +22,7 @@
window.JS_POSTHOG_HOST = {{js_posthog_host | safe}};
window.JS_POSTHOG_SELF_CAPTURE = {{self_capture | yesno:"true,false" }};
window.POSTHOG_USER_IDENTITY_WITH_FLAGS = JSON.parse("{{ posthog_bootstrap | escapejs }}")
- window.IMPERSONATED_SESSION = {{impersonated_session | yesno:"true,false"}};
+ window.IMPERSONATED_SESSION = {{is_impersonated_session | yesno:"true,false"}};
window.POSTHOG_JS_UUID_VERSION = "{{posthog_js_uuid_version}}";
{% endif %}
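
For context, a hedged sketch of what a Django context processor such as loginas.context_processors.impersonated_session_status plausibly provides (an illustrative guess, not the library's actual source), which is how the renamed is_impersonated_session variable reaches this template:

from loginas.utils import is_impersonated_session

def impersonated_session_status(request):
    # entries in the returned dict become template variables on every request
    return {"is_impersonated_session": is_impersonated_session(request)}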
diff --git a/posthog/temporal/tests/batch_exports/test_logger.py b/posthog/temporal/tests/batch_exports/test_logger.py
index a3ceb011e27a2..5c12cef1d034a 100644
--- a/posthog/temporal/tests/batch_exports/test_logger.py
+++ b/posthog/temporal/tests/batch_exports/test_logger.py
@@ -399,9 +399,9 @@ async def log_activity():
if iterations > 10:
raise TimeoutError("Timedout waiting for logs")
- assert len(results) == 1 # type: ignore
+ assert len(results) == 1
- row = results[0] # type: ignore
+ row = results[0]
assert row[0] == activity_environment.info.workflow_run_id
assert row[1] == "info"
assert row[2] == expected_log_source
diff --git a/posthog/test/activity_logging/test_activity_logging.py b/posthog/test/activity_logging/test_activity_logging.py
index 9e2030779f1a9..8bfe40c7034fa 100644
--- a/posthog/test/activity_logging/test_activity_logging.py
+++ b/posthog/test/activity_logging/test_activity_logging.py
@@ -25,6 +25,7 @@ def test_can_save_a_model_changed_activity_log(self) -> None:
organization_id=self.organization.id,
team_id=self.team.id,
user=self.user,
+ was_impersonated=False,
item_id=6,
scope="FeatureFlag",
activity="updated",
@@ -45,6 +46,7 @@ def test_can_save_a_log_that_has_no_model_changes(self) -> None:
organization_id=self.organization.id,
team_id=self.team.id,
user=self.user,
+ was_impersonated=False,
item_id=None,
scope="dinglehopper",
activity="added_to_clink_expander",
@@ -53,20 +55,21 @@ def test_can_save_a_log_that_has_no_model_changes(self) -> None:
log: ActivityLog = ActivityLog.objects.latest("id")
self.assertEqual(log.activity, "added_to_clink_expander")
- def test_does_not_save_an_updated_activity_that_has_no_changes(self) -> None:
+ def test_does_not_save_impersonated_activity_without_user(self) -> None:
log_activity(
organization_id=self.organization.id,
team_id=self.team.id,
- user=self.user,
+ user=None,
+ was_impersonated=True,
item_id=None,
scope="dinglehopper",
- activity="updated",
+ activity="added_to_clink_expander",
detail=Detail(),
)
with pytest.raises(ActivityLog.DoesNotExist):
ActivityLog.objects.latest("id")
- def test_can_not_save_if_there_is_neither_a_team_id_nor_an_organisation_id(self) -> None:
+ def test_does_not_save_if_there_is_neither_a_team_id_nor_an_organisation_id(self) -> None:
# even when there are logs with team id or org id saved
ActivityLog.objects.create(team_id=3)
ActivityLog.objects.create(organization_id=UUIDT())
@@ -81,20 +84,22 @@ def test_can_not_save_if_there_is_neither_a_team_id_nor_an_organisation_id(self)
def test_does_not_throw_if_cannot_log_activity(self) -> None:
with self.assertLogs(level="WARN") as log:
- try:
- log_activity(
- organization_id=UUIDT(),
- team_id=1,
- # will cause logging to raise exception because user is unsaved
- # avoids needing to mock anything to force the exception
- user=User(first_name="testy", email="test@example.com"),
- item_id="12345",
- scope="testing throwing exceptions on create",
- activity="does not explode",
- detail=Detail(),
- )
- except Exception as e:
- raise pytest.fail(f"Should not have raised exception: {e}")
+ with self.settings(TEST=False): # Enable production-level silencing
+ try:
+ log_activity(
+ organization_id=UUIDT(),
+ team_id=1,
+ # will cause logging to raise exception because user is unsaved
+ # avoids needing to mock anything to force the exception
+ user=User(first_name="testy", email="test@example.com"),
+ was_impersonated=False,
+ item_id="12345",
+ scope="testing throwing exceptions on create",
+ activity="does not explode",
+ detail=Detail(),
+ )
+ except Exception as e:
+ raise pytest.fail(f"Should not have raised exception: {e}")
logged_warning = log.records[0].__dict__
self.assertEqual(logged_warning["levelname"], "WARNING")
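
A sketch of how callers are expected to supply the newly required flag (hypothetical view-layer wiring, reusing the log_activity/Detail signatures exercised in these tests): derive was_impersonated from the request via django-loginas rather than hard-coding it.

from loginas.utils import is_impersonated_session

def log_feature_flag_update(request, team, item_id, changes):
    log_activity(
        organization_id=team.organization_id,
        team_id=team.pk,
        user=request.user,
        was_impersonated=is_impersonated_session(request),
        item_id=item_id,
        scope="FeatureFlag",
        activity="updated",
        detail=Detail(changes=changes),
    )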
diff --git a/posthog/utils.py b/posthog/utils.py
index ecefc4ea1f6e0..1689eef3f6cc9 100644
--- a/posthog/utils.py
+++ b/posthog/utils.py
@@ -309,12 +309,10 @@ def render_template(
If team_for_public_context is provided, this means this is a public page such as a shared dashboard.
"""
- from loginas.utils import is_impersonated_session
template = get_template(template_name)
context["opt_out_capture"] = settings.OPT_OUT_CAPTURE
- context["impersonated_session"] = is_impersonated_session(request)
context["self_capture"] = settings.SELF_CAPTURE
if sentry_dsn := os.environ.get("SENTRY_DSN"):