diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000..65971db798cd3 --- /dev/null +++ b/.env.example @@ -0,0 +1 @@ +MAPLIBRE_STYLE_URL=https://api.example.com/style.json?key=mykey \ No newline at end of file diff --git a/.eslintrc.js b/.eslintrc.js index 3d136069d31cb..7c47fb0b89d16 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -18,6 +18,12 @@ module.exports = { react: { version: 'detect', }, + 'import/resolver': { + node: { + paths: ['eslint-rules'], // Add the directory containing your custom rules + extensions: ['.js', '.jsx', '.ts', '.tsx'], // Ensure ESLint resolves both JS and TS files + }, + }, }, extends: [ 'eslint:recommended', @@ -37,7 +43,7 @@ module.exports = { ecmaVersion: 2018, sourceType: 'module', }, - plugins: ['prettier', 'react', 'cypress', '@typescript-eslint', 'no-only-tests', 'jest', 'compat'], + plugins: ['prettier', 'react', 'cypress', '@typescript-eslint', 'no-only-tests', 'jest', 'compat', 'posthog'], rules: { 'no-console': ['error', { allow: ['warn', 'error'] }], 'no-debugger': 'error', @@ -91,7 +97,7 @@ module.exports = { ], }, ], - 'react/forbid-elements': [ + 'posthog/warn-elements': [ 1, { forbid: [ @@ -236,6 +242,16 @@ module.exports = { '@typescript-eslint/no-var-requires': 'off', }, }, + { + files: 'eslint-rules/**/*', + extends: ['eslint:recommended'], + rules: { + '@typescript-eslint/no-var-requires': 'off', + }, + env: { + node: true, + }, + }, ], reportUnusedDisableDirectives: true, } diff --git a/.storybook/decorators/with3000.tsx b/.storybook/decorators/with3000.tsx deleted file mode 100644 index 67c13abade016..0000000000000 --- a/.storybook/decorators/with3000.tsx +++ /dev/null @@ -1,20 +0,0 @@ -import { useMountedLogic } from 'kea' -import { FEATURE_FLAGS } from 'lib/constants' -import { useEffect } from 'react' -import { themeLogic } from '~/layout/navigation-3000/themeLogic' -import { useFeatureFlags } from '~/mocks/browser' -import type { DecoratorFn } from '@storybook/react' - -/** 
Activate PostHog 3000. */ -export const with3000: DecoratorFn = (Story) => { - useFeatureFlags([FEATURE_FLAGS.POSTHOG_3000]) - useMountedLogic(themeLogic) - useEffect(() => { - document.body.classList.add('posthog-3000') - return () => { - document.body.classList.remove('posthog-3000') - } - }) - - return -} diff --git a/.storybook/decorators/withFeatureFlags.tsx b/.storybook/decorators/withFeatureFlags.tsx index 307c9f8943f16..752630fb4b400 100644 --- a/.storybook/decorators/withFeatureFlags.tsx +++ b/.storybook/decorators/withFeatureFlags.tsx @@ -1,5 +1,5 @@ -import { useFeatureFlags } from '~/mocks/browser' -import type { DecoratorFn } from '@storybook/react' +import { setFeatureFlags } from '~/mocks/browser' +import type { Decorator } from '@storybook/react' /** Global story decorator that allows setting feature flags. * @@ -13,9 +13,9 @@ import type { DecoratorFn } from '@storybook/react' * } as ComponentMeta * ``` */ -export const withFeatureFlags: DecoratorFn = (Story, { parameters }) => { +export const withFeatureFlags: Decorator = (Story, { parameters }) => { if (parameters.featureFlags) { - useFeatureFlags(parameters.featureFlags) + setFeatureFlags(parameters.featureFlags) } return diff --git a/.storybook/decorators/withKea/withKea.tsx b/.storybook/decorators/withKea/withKea.tsx index 6abad47e4b55c..67d41cb9339f2 100644 --- a/.storybook/decorators/withKea/withKea.tsx +++ b/.storybook/decorators/withKea/withKea.tsx @@ -1,9 +1,9 @@ -import type { DecoratorFn } from '@storybook/react' +import type { Decorator } from '@storybook/react' import { useAvailableFeatures } from '~/mocks/features' import { KeaStory } from './kea-story' -export const withKea: DecoratorFn = (Story) => { +export const withKea: Decorator = (Story) => { // Reset enabled enterprise features. Overwrite this line within your stories. 
useAvailableFeatures([]) return ( diff --git a/.storybook/decorators/withMockDate.tsx b/.storybook/decorators/withMockDate.tsx index d84e333871d77..0582d225a5f90 100644 --- a/.storybook/decorators/withMockDate.tsx +++ b/.storybook/decorators/withMockDate.tsx @@ -1,4 +1,4 @@ -import type { DecoratorFn } from '@storybook/react' +import type { Decorator } from '@storybook/react' import MockDate from 'mockdate' /** Global story decorator that allows mocking of dates. @@ -13,7 +13,7 @@ import MockDate from 'mockdate' * } as ComponentMeta * ``` */ -export const withMockDate: DecoratorFn = (Story, { parameters }) => { +export const withMockDate: Decorator = (Story, { parameters }) => { if (parameters.mockDate) { MockDate.set(parameters.mockDate) } else { diff --git a/.storybook/decorators/withSnapshotsDisabled.tsx b/.storybook/decorators/withSnapshotsDisabled.tsx index e2cd4e2edf906..6e7598c7a9c7e 100644 --- a/.storybook/decorators/withSnapshotsDisabled.tsx +++ b/.storybook/decorators/withSnapshotsDisabled.tsx @@ -1,9 +1,9 @@ -import { DecoratorFn } from '@storybook/react' +import { Decorator } from '@storybook/react' import { inStorybookTestRunner } from 'lib/utils' /** Workaround for https://github.com/storybookjs/test-runner/issues/74 */ // TODO: Smoke-test all the stories by removing this decorator, once all the stories pass -export const withSnapshotsDisabled: DecoratorFn = (Story, { parameters }) => { +export const withSnapshotsDisabled: Decorator = (Story, { parameters }) => { if (parameters?.testOptions?.skip && inStorybookTestRunner()) { return <>Disabled for Test Runner } diff --git a/.storybook/decorators/withTheme.tsx b/.storybook/decorators/withTheme.tsx new file mode 100644 index 0000000000000..1749a4244c497 --- /dev/null +++ b/.storybook/decorators/withTheme.tsx @@ -0,0 +1,44 @@ +import type { Decorator } from '@storybook/react' + +import { FEATURE_FLAGS } from 'lib/constants' + +/** Global story decorator that is used by the theming control to + * switch 
between themes. + */ +export const withTheme: Decorator = (Story, context) => { + const theme = context.globals.theme + + // set the body class + const actualClassState = document.body.classList.contains('posthog-3000') + const desiredClassState = theme !== 'legacy' + + if (actualClassState !== desiredClassState) { + if (desiredClassState) { + document.body.classList.add('posthog-3000') + } else { + document.body.classList.remove('posthog-3000') + } + } + + // set the feature flag + const actualFeatureFlagState = window.POSTHOG_APP_CONTEXT!.persisted_feature_flags?.includes( + FEATURE_FLAGS.POSTHOG_3000 + ) + const desiredFeatureFlagState = theme !== 'legacy' + + if (actualFeatureFlagState !== desiredFeatureFlagState) { + const currentFlags = window.POSTHOG_APP_CONTEXT!.persisted_feature_flags || [] + if (desiredFeatureFlagState) { + window.POSTHOG_APP_CONTEXT!.persisted_feature_flags = [...currentFlags, FEATURE_FLAGS.POSTHOG_3000] + } else { + window.POSTHOG_APP_CONTEXT!.persisted_feature_flags = currentFlags.filter( + (f) => f !== FEATURE_FLAGS.POSTHOG_3000 + ) + } + } + + // set the theme + document.body.setAttribute('theme', theme === 'dark' ? 'dark' : 'light') + + return +} diff --git a/.storybook/preview.tsx b/.storybook/preview.tsx index 9b0a76da1d367..091884046929e 100644 --- a/.storybook/preview.tsx +++ b/.storybook/preview.tsx @@ -10,6 +10,7 @@ import { withMockDate } from './decorators/withMockDate' import { defaultMocks } from '~/mocks/handlers' import { withSnapshotsDisabled } from './decorators/withSnapshotsDisabled' import { withFeatureFlags } from './decorators/withFeatureFlags' +import { withTheme } from './decorators/withTheme' const setupMsw = () => { // Make sure the msw worker is started @@ -86,6 +87,8 @@ export const decorators: Meta['decorators'] = [ withMockDate, // Allow us to easily set feature flags in stories. 
withFeatureFlags, + // Set theme from global context + withTheme, ] const preview: Preview = { @@ -110,6 +113,22 @@ const preview: Preview = { ), }, }, + globalTypes: { + theme: { + description: '', + defaultValue: 'legacy', + toolbar: { + title: 'Theme', + items: [ + { value: 'legacy', icon: 'faceneutral', title: 'Legacy' }, + { value: 'light', icon: 'sun', title: 'Light' }, + { value: 'dark', icon: 'moon', title: 'Dark' }, + ], + // change the title based on the selected value + dynamicTitle: true, + }, + }, + }, } export default preview diff --git a/docker-compose.base.yml b/docker-compose.base.yml index dba92a0046034..f1d1a3f658e64 100644 --- a/docker-compose.base.yml +++ b/docker-compose.base.yml @@ -89,6 +89,18 @@ services: command: ./bin/start-backend & ./bin/start-frontend restart: on-failure + capture: + image: ghcr.io/posthog/capture:main + restart: on-failure + environment: + ADDRESS: '0.0.0.0:3000' + KAFKA_TOPIC: 'events_plugin_ingestion' + KAFKA_HOSTS: 'kafka:9092' + REDIS_URL: 'redis://redis:6379/' + depends_on: + - redis + - kafka + plugins: command: ./bin/plugin-server --no-restart-loop restart: on-failure diff --git a/docker-compose.dev-full.yml b/docker-compose.dev-full.yml index ba4a40185c7d6..6d62266e6b8a1 100644 --- a/docker-compose.dev-full.yml +++ b/docker-compose.dev-full.yml @@ -89,6 +89,15 @@ services: environment: - DEBUG=1 + capture: + extends: + file: docker-compose.base.yml + service: capture + ports: + - 3000:3000 + environment: + - DEBUG=1 + plugins: extends: file: docker-compose.base.yml diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 2045dee0804c5..365ec2c5b452d 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -72,6 +72,17 @@ services: - '1080:1080' - '1025:1025' + # Optional capture + capture: + profiles: ['capture-rs'] + extends: + file: docker-compose.base.yml + service: capture + ports: + - 3000:3000 + environment: + - DEBUG=1 + # Temporal containers elasticsearch: extends: diff --git 
a/ee/api/authentication.py b/ee/api/authentication.py index a1c12c61c857f..2dfb6c7b9f053 100644 --- a/ee/api/authentication.py +++ b/ee/api/authentication.py @@ -24,7 +24,6 @@ @api_view(["GET"]) def saml_metadata_view(request, *args, **kwargs): - if ( not request.user.organization_memberships.get(organization=request.user.organization).level >= OrganizationMembership.Level.ADMIN @@ -46,7 +45,6 @@ class MultitenantSAMLAuth(SAMLAuth): """ def get_idp(self, organization_domain_or_id: Union["OrganizationDomain", str]): - try: organization_domain = ( organization_domain_or_id @@ -57,7 +55,10 @@ def get_idp(self, organization_domain_or_id: Union["OrganizationDomain", str]): raise AuthFailed("saml", "Authentication request is invalid. Invalid RelayState.") if not organization_domain.organization.is_feature_available(AvailableFeature.SAML): - raise AuthFailed("saml", "Your organization does not have the required license to use SAML.") + raise AuthFailed( + "saml", + "Your organization does not have the required license to use SAML.", + ) return SAMLIdentityProvider( str(organization_domain.id), @@ -88,7 +89,12 @@ def auth_url(self): # name, since we multiple IdPs share the same auth_complete URL. return auth.login(return_to=str(instance.id)) - def _get_attr(self, response_attributes: Dict[str, Any], attribute_names: List[str], optional: bool = False) -> str: + def _get_attr( + self, + response_attributes: Dict[str, Any], + attribute_names: List[str], + optional: bool = False, + ) -> str: """ Fetches a specific attribute from the SAML response, attempting with multiple different attribute names. We attempt multiple attribute names to make it easier for admins to configure SAML (less configuration to set). 
@@ -114,7 +120,9 @@ def get_user_details(self, response): attributes = response["attributes"] return { "fullname": self._get_attr( - attributes, ["full_name", "FULL_NAME", "fullName", OID_COMMON_NAME], optional=True + attributes, + ["full_name", "FULL_NAME", "fullName", OID_COMMON_NAME], + optional=True, ), "first_name": self._get_attr( attributes, @@ -140,7 +148,12 @@ def get_user_details(self, response): ), "email": self._get_attr( attributes, - ["email", "EMAIL", "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress", OID_MAIL], + [ + "email", + "EMAIL", + "http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress", + OID_MAIL, + ], ), } diff --git a/ee/api/billing.py b/ee/api/billing.py index ce6495bc8664b..08e0fc08a9719 100644 --- a/ee/api/billing.py +++ b/ee/api/billing.py @@ -74,7 +74,11 @@ def patch(self, request: Request, *args: Any, **kwargs: Any) -> Response: BillingManager(license).update_billing(org, {"custom_limits_usd": custom_limits_usd}) if distinct_id: - posthoganalytics.capture(distinct_id, "billing limits updated", properties={**custom_limits_usd}) + posthoganalytics.capture( + distinct_id, + "billing limits updated", + properties={**custom_limits_usd}, + ) posthoganalytics.group_identify( "organization", str(org.id), @@ -143,7 +147,8 @@ def license(self, request: Request, *args: Any, **kwargs: Any) -> HttpResponse: license = License(key=serializer.validated_data["license"]) res = requests.get( - f"{BILLING_SERVICE_URL}/api/billing", headers=BillingManager(license).get_auth_headers(organization) + f"{BILLING_SERVICE_URL}/api/billing", + headers=BillingManager(license).get_auth_headers(organization), ) if res.status_code != 200: diff --git a/ee/api/dashboard_collaborator.py b/ee/api/dashboard_collaborator.py index 8b26261c1c343..e9b339b921b0e 100644 --- a/ee/api/dashboard_collaborator.py +++ b/ee/api/dashboard_collaborator.py @@ -89,7 +89,11 @@ class DashboardCollaboratorViewSet( mixins.DestroyModelMixin, 
viewsets.GenericViewSet, ): - permission_classes = [IsAuthenticated, TeamMemberAccessPermission, CanEditDashboardCollaborator] + permission_classes = [ + IsAuthenticated, + TeamMemberAccessPermission, + CanEditDashboardCollaborator, + ] pagination_class = None queryset = DashboardPrivilege.objects.select_related("dashboard").filter(user__is_active=True) lookup_field = "user__uuid" diff --git a/ee/api/ee_event_definition.py b/ee/api/ee_event_definition.py index 4330ce554e775..e83b293b8caaa 100644 --- a/ee/api/ee_event_definition.py +++ b/ee/api/ee_event_definition.py @@ -4,7 +4,11 @@ from ee.models.event_definition import EnterpriseEventDefinition from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin -from posthog.models.activity_logging.activity_log import dict_changes_between, log_activity, Detail +from posthog.models.activity_logging.activity_log import ( + dict_changes_between, + log_activity, + Detail, +) class EnterpriseEventDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer): diff --git a/ee/api/ee_property_definition.py b/ee/api/ee_property_definition.py index bac7a664de0cb..aa190bbd7c72d 100644 --- a/ee/api/ee_property_definition.py +++ b/ee/api/ee_property_definition.py @@ -4,7 +4,11 @@ from posthog.api.shared import UserBasicSerializer from posthog.api.tagged_item import TaggedItemSerializerMixin from posthog.models import PropertyDefinition -from posthog.models.activity_logging.activity_log import dict_changes_between, log_activity, Detail +from posthog.models.activity_logging.activity_log import ( + dict_changes_between, + log_activity, + Detail, +) class EnterprisePropertyDefinitionSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer): diff --git a/ee/api/explicit_team_member.py b/ee/api/explicit_team_member.py index b1a01221f5bbc..723e317a86a55 100644 --- a/ee/api/explicit_team_member.py +++ b/ee/api/explicit_team_member.py @@ -34,15 +34,24 @@ class Meta: 
"user_uuid", # write_only (see above) "effective_level", # read_only (calculated) ] - read_only_fields = ["id", "parent_membership_id", "joined_at", "updated_at", "user", "effective_level"] + read_only_fields = [ + "id", + "parent_membership_id", + "joined_at", + "updated_at", + "user", + "effective_level", + ] def create(self, validated_data): team: Team = self.context["get_team"]() user_uuid = validated_data.pop("user_uuid") validated_data["team"] = team try: - requesting_parent_membership: OrganizationMembership = OrganizationMembership.objects.get( - organization_id=team.organization_id, user__uuid=user_uuid, user__is_active=True + requesting_parent_membership: (OrganizationMembership) = OrganizationMembership.objects.get( + organization_id=team.organization_id, + user__uuid=user_uuid, + user__is_active=True, ) except OrganizationMembership.DoesNotExist: raise exceptions.PermissionDenied("You both need to belong to the same organization.") diff --git a/ee/api/feature_flag_role_access.py b/ee/api/feature_flag_role_access.py index 1eb4fa58ab0be..d3ca7a68c1a32 100644 --- a/ee/api/feature_flag_role_access.py +++ b/ee/api/feature_flag_role_access.py @@ -29,7 +29,9 @@ def has_permission(self, request, view): ) if resource_access.access_level == OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT: return True - except OrganizationResourceAccess.DoesNotExist: # no organization resource access for this means full default edit access + except ( + OrganizationResourceAccess.DoesNotExist + ): # no organization resource access for this means full default edit access return True try: feature_flag: FeatureFlag = FeatureFlag.objects.get(id=view.parents_query_dict["feature_flag_id"]) diff --git a/ee/api/hooks.py b/ee/api/hooks.py index f1457d483fd47..96eba47f14378 100644 --- a/ee/api/hooks.py +++ b/ee/api/hooks.py @@ -8,7 +8,10 @@ from ee.models.hook import Hook from posthog.api.routing import StructuredViewSetMixin from posthog.models.user import User -from 
posthog.permissions import OrganizationMemberPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + OrganizationMemberPermissions, + TeamMemberAccessPermission, +) class HookSerializer(serializers.ModelSerializer): @@ -35,7 +38,11 @@ class HookViewSet(StructuredViewSetMixin, viewsets.ModelViewSet): queryset = Hook.objects.all() ordering = "-created_at" - permission_classes = [IsAuthenticated, OrganizationMemberPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + OrganizationMemberPermissions, + TeamMemberAccessPermission, + ] serializer_class = HookSerializer def perform_create(self, serializer): diff --git a/ee/api/integration.py b/ee/api/integration.py index b4ef4ac35beff..d7da62a31a2ee 100644 --- a/ee/api/integration.py +++ b/ee/api/integration.py @@ -8,7 +8,11 @@ from ee.tasks.slack import handle_slack_event from posthog.api.integration import IntegrationSerializer -from posthog.models.integration import Integration, SlackIntegration, SlackIntegrationError +from posthog.models.integration import ( + Integration, + SlackIntegration, + SlackIntegrationError, +) class PublicIntegrationViewSet(viewsets.GenericViewSet): diff --git a/ee/api/organization_resource_access.py b/ee/api/organization_resource_access.py index a938d59a320eb..618e59475a0be 100644 --- a/ee/api/organization_resource_access.py +++ b/ee/api/organization_resource_access.py @@ -10,7 +10,15 @@ class OrganizationResourceAccessSerializer(serializers.ModelSerializer): class Meta: model = OrganizationResourceAccess - fields = ["id", "resource", "access_level", "organization", "created_at", "updated_at", "created_by"] + fields = [ + "id", + "resource", + "access_level", + "organization", + "created_at", + "updated_at", + "created_by", + ] read_only_fields = ["id", "created_at", "created_by", "organization"] def validate_resource(self, resource): diff --git a/ee/api/role.py b/ee/api/role.py index 323b9f9c022b2..7114ef4f0110f 100644 --- 
a/ee/api/role.py +++ b/ee/api/role.py @@ -25,7 +25,7 @@ class RolePermissions(BasePermission): def has_permission(self, request, view): organization = request.user.organization - requesting_membership: OrganizationMembership = OrganizationMembership.objects.get( + requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get( user_id=cast(User, request.user).id, organization=organization, ) @@ -42,7 +42,15 @@ class RoleSerializer(serializers.ModelSerializer): class Meta: model = Role - fields = ["id", "name", "feature_flags_access_level", "created_at", "created_by", "members", "associated_flags"] + fields = [ + "id", + "name", + "feature_flags_access_level", + "created_at", + "created_by", + "members", + "associated_flags", + ] read_only_fields = ["id", "created_at", "created_by"] def validate_name(self, name): @@ -55,7 +63,8 @@ def create(self, validated_data): validated_data["organization"] = organization try: default_flags_org_setting = OrganizationResourceAccess.objects.get( - organization=organization, resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS + organization=organization, + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, ).access_level except OrganizationResourceAccess.DoesNotExist: default_flags_org_setting = OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT diff --git a/ee/api/sentry_stats.py b/ee/api/sentry_stats.py index e0a15e2019a1b..52b16647c2cbf 100644 --- a/ee/api/sentry_stats.py +++ b/ee/api/sentry_stats.py @@ -43,7 +43,6 @@ def get_sentry_stats(start_time: str, end_time: str) -> Tuple[dict, int]: def get_tagged_issues_stats( start_time: str, end_time: str, tags: Dict[str, str], target_issues: List[str] ) -> Dict[str, Any]: - sentry_config: Dict[str, str] = get_instance_settings(["SENTRY_AUTH_TOKEN", "SENTRY_ORGANIZATION"]) org_slug = sentry_config.get("SENTRY_ORGANIZATION") @@ -92,7 +91,6 @@ def get_stats_for_timerange( target_end_time: str, tags: Optional[Dict[str, str]] = None, ) -> 
Tuple[int, int]: - base_counts, base_total_count = get_sentry_stats(base_start_time, base_end_time) target_counts, target_total_count = get_sentry_stats(target_start_time, target_end_time) @@ -101,7 +99,6 @@ def get_stats_for_timerange( @api_view(["GET"]) def sentry_stats(request: HttpRequest): - try: current_time = datetime.utcnow() target_end_date = current_time.strftime("%Y-%m-%dT%H:%M:%S") diff --git a/ee/api/test/base.py b/ee/api/test/base.py index 05691d8c9153d..1fb46cceae1cd 100644 --- a/ee/api/test/base.py +++ b/ee/api/test/base.py @@ -21,7 +21,12 @@ def license_required_response( self, message: str = "This feature is part of the premium PostHog offering. Self-hosted licenses are no longer available for purchase. Please contact sales@posthog.com to discuss options.", ) -> Dict[str, Optional[str]]: - return {"type": "server_error", "code": "payment_required", "detail": message, "attr": None} + return { + "type": "server_error", + "code": "payment_required", + "detail": message, + "attr": None, + } @classmethod def setUpTestData(cls): diff --git a/ee/api/test/test_action.py b/ee/api/test/test_action.py index cfdce454fc449..d451db03dfd5c 100644 --- a/ee/api/test/test_action.py +++ b/ee/api/test/test_action.py @@ -17,7 +17,9 @@ def test_create_action_update_delete_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) response = self.client.post(f"/api/projects/{self.team.id}/actions/", data={"name": "user signed up"}) @@ -32,7 +34,8 @@ def test_create_action_update_delete_tags(self): self.assertEqual(set(response.json()["tags"]), {"hello", "random"}) response = self.client.patch( - f"/api/projects/{self.team.id}/actions/{response.json()['id']}", data={"name": "user signed up", "tags": []} + 
f"/api/projects/{self.team.id}/actions/{response.json()['id']}", + data={"name": "user signed up", "tags": []}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -42,12 +45,17 @@ def test_create_action_with_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) response = self.client.post( f"/api/projects/{self.team.id}/actions/", - data={"name": "user signed up", "tags": ["nightly", "is", "a", "good", "girl"]}, + data={ + "name": "user signed up", + "tags": ["nightly", "is", "a", "good", "girl"], + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(set(response.json()["tags"]), {"nightly", "is", "a", "good", "girl"}) @@ -56,7 +64,9 @@ def test_actions_does_not_nplus1(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) # Ensure the cloud check is cached to not affect the number of queries @@ -79,10 +89,13 @@ def test_actions_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) response = self.client.post( - f"/api/projects/{self.team.id}/actions/", data={"name": "user signed up", "tags": ["a", "b", "a"]} + f"/api/projects/{self.team.id}/actions/", + data={"name": "user signed up", "tags": ["a", "b", "a"]}, ) 
self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) diff --git a/ee/api/test/test_authentication.py b/ee/api/test/test_authentication.py index cd8ebb454eabb..00fca66b2914b 100644 --- a/ee/api/test/test_authentication.py +++ b/ee/api/test/test_authentication.py @@ -31,7 +31,10 @@ "SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET": "google_secret", } -GITHUB_MOCK_SETTINGS = {"SOCIAL_AUTH_GITHUB_KEY": "github_key", "SOCIAL_AUTH_GITHUB_SECRET": "github_secret"} +GITHUB_MOCK_SETTINGS = { + "SOCIAL_AUTH_GITHUB_KEY": "github_key", + "SOCIAL_AUTH_GITHUB_SECRET": "github_secret", +} CURRENT_FOLDER = os.path.dirname(__file__) @@ -51,7 +54,10 @@ def test_login_precheck_with_enforced_sso(self): with self.settings(**GOOGLE_MOCK_SETTINGS): response = self.client.post("/api/login/precheck", {"email": "spain@witw.app"}) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), {"sso_enforcement": "google-oauth2", "saml_available": False}) + self.assertEqual( + response.json(), + {"sso_enforcement": "google-oauth2", "saml_available": False}, + ) def test_login_precheck_with_unverified_domain(self): OrganizationDomain.objects.create( @@ -117,14 +123,20 @@ def test_can_enforce_sso(self): # Can log in with password with SSO configured but not enforced with self.settings(**GOOGLE_MOCK_SETTINGS): - response = self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json(), {"success": True}) # Forcing SSO disables regular API password login self.create_enforced_domain() with self.settings(**GOOGLE_MOCK_SETTINGS): - response = self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": self.CONFIG_EMAIL, "password": 
self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -144,7 +156,10 @@ def test_can_enforce_sso_on_cloud_enviroment(self): self.organization.save() with self.settings(**GOOGLE_MOCK_SETTINGS): - response = self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -158,7 +173,11 @@ def test_can_enforce_sso_on_cloud_enviroment(self): def test_cannot_reset_password_with_enforced_sso(self): self.create_enforced_domain() - with self.settings(**GOOGLE_MOCK_SETTINGS, EMAIL_HOST="localhost", SITE_URL="https://my.posthog.net"): + with self.settings( + **GOOGLE_MOCK_SETTINGS, + EMAIL_HOST="localhost", + SITE_URL="https://my.posthog.net", + ): response = self.client.post("/api/reset/", {"email": "i_dont_exist@posthog.com"}) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -182,7 +201,10 @@ def test_cannot_enforce_sso_without_a_license(self, mock_warning): # Enforcement is ignored with self.settings(**GOOGLE_MOCK_SETTINGS): - response = self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json(), {"success": True}) @@ -247,7 +269,6 @@ def setUpTestData(cls): # SAML Metadata def test_can_get_saml_metadata(self): - self.client.force_login(self.user) OrganizationMembership.objects.filter(organization=self.organization, user=self.user).update( @@ -307,7 +328,10 @@ def test_cannot_initiate_saml_flow_for_unconfigured_domain(self): with self.assertRaises(AuthFailed) as e: 
self.client.get("/login/saml/?email=hellohello@gmail.com") - self.assertEqual(str(e.exception), "Authentication failed: SAML not configured for this user.") + self.assertEqual( + str(e.exception), + "Authentication failed: SAML not configured for this user.", + ) def test_cannot_initiate_saml_flow_for_unverified_domain(self): """ @@ -320,13 +344,15 @@ def test_cannot_initiate_saml_flow_for_unverified_domain(self): with self.assertRaises(AuthFailed) as e: self.client.get("/login/saml/?email=hellohello@gmail.com") - self.assertEqual(str(e.exception), "Authentication failed: SAML not configured for this user.") + self.assertEqual( + str(e.exception), + "Authentication failed: SAML not configured for this user.", + ) # Finish SAML flow (i.e. actual log in) @freeze_time("2021-08-25T22:09:14.252Z") # Ensures the SAML timestamp validation passes def test_can_login_with_saml(self): - user = User.objects.create(email="engineering@posthog.com", distinct_id=str(uuid.uuid4())) response = self.client.get("/login/saml/?email=engineering@posthog.com") @@ -336,12 +362,19 @@ def test_can_login_with_saml(self): _session.update({"saml_state": "ONELOGIN_87856a50b5490e643b1ebef9cb5bf6e78225a3c6"}) _session.save() - with open(os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), "r", encoding="utf_8") as f: + with open( + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), + "r", + encoding="utf_8", + ) as f: saml_response = f.read() response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, follow=True, format="multipart", ) @@ -373,7 +406,9 @@ def test_saml_jit_provisioning_and_assertion_with_different_attribute_names(self _session.save() with open( - os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response_alt_attribute_names"), "r", encoding="utf_8" + os.path.join(CURRENT_FOLDER, 
"fixtures/saml_login_response_alt_attribute_names"), + "r", + encoding="utf_8", ) as f: saml_response = f.read() @@ -381,7 +416,10 @@ def test_saml_jit_provisioning_and_assertion_with_different_attribute_names(self response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, format="multipart", follow=True, ) @@ -434,7 +472,11 @@ def test_cannot_login_with_improperly_signed_payload(self): _session.update({"saml_state": "ONELOGIN_87856a50b5490e643b1ebef9cb5bf6e78225a3c6"}) _session.save() - with open(os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), "r", encoding="utf_8") as f: + with open( + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), + "r", + encoding="utf_8", + ) as f: saml_response = f.read() user_count = User.objects.count() @@ -442,7 +484,10 @@ def test_cannot_login_with_improperly_signed_payload(self): with self.assertRaises(AuthFailed) as e: response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, format="multipart", follow=True, ) @@ -467,14 +512,21 @@ def test_cannot_signup_with_saml_if_jit_provisioning_is_disabled(self): _session.update({"saml_state": "ONELOGIN_87856a50b5490e643b1ebef9cb5bf6e78225a3c6"}) _session.save() - with open(os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), "r", encoding="utf_8") as f: + with open( + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), + "r", + encoding="utf_8", + ) as f: saml_response = f.read() user_count = User.objects.count() response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), 
+ }, format="multipart", follow=True, ) @@ -499,7 +551,9 @@ def test_cannot_create_account_without_first_name_in_payload(self): _session.save() with open( - os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response_no_first_name"), "r", encoding="utf_8" + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response_no_first_name"), + "r", + encoding="utf_8", ) as f: saml_response = f.read() @@ -508,12 +562,18 @@ def test_cannot_create_account_without_first_name_in_payload(self): with self.assertRaises(ValidationError) as e: response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, format="multipart", follow=True, ) - self.assertEqual(str(e.exception), "{'name': ['This field is required and was not provided by the IdP.']}") + self.assertEqual( + str(e.exception), + "{'name': ['This field is required and was not provided by the IdP.']}", + ) self.assertEqual(User.objects.count(), user_count) @@ -532,19 +592,27 @@ def test_cannot_login_with_saml_on_unverified_domain(self): _session.update({"saml_state": "ONELOGIN_87856a50b5490e643b1ebef9cb5bf6e78225a3c6"}) _session.save() - with open(os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), "r", encoding="utf_8") as f: + with open( + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), + "r", + encoding="utf_8", + ) as f: saml_response = f.read() with self.assertRaises(AuthFailed) as e: response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, follow=True, format="multipart", ) self.assertEqual( - str(e.exception), "Authentication failed: Authentication request is invalid. Invalid RelayState." + str(e.exception), + "Authentication failed: Authentication request is invalid. 
Invalid RelayState.", ) # Assert user is not logged in @@ -552,14 +620,16 @@ def test_cannot_login_with_saml_on_unverified_domain(self): self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) def test_saml_can_be_enforced(self): - User.objects.create_and_join( - organization=self.organization, email="engineering@posthog.com", password=self.CONFIG_PASSWORD + organization=self.organization, + email="engineering@posthog.com", + password=self.CONFIG_PASSWORD, ) # Can log in regularly with SAML configured response = self.client.post( - "/api/login", {"email": "engineering@posthog.com", "password": self.CONFIG_PASSWORD} + "/api/login", + {"email": "engineering@posthog.com", "password": self.CONFIG_PASSWORD}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json(), {"success": True}) @@ -568,7 +638,8 @@ def test_saml_can_be_enforced(self): self.organization_domain.sso_enforcement = "saml" self.organization_domain.save() response = self.client.post( - "/api/login", {"email": "engineering@posthog.com", "password": self.CONFIG_PASSWORD} + "/api/login", + {"email": "engineering@posthog.com", "password": self.CONFIG_PASSWORD}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -601,7 +672,8 @@ def test_cannot_use_saml_without_enterprise_license(self): with self.assertRaises(AuthFailed) as e: response = self.client.get("/login/saml/?email=engineering@posthog.com") self.assertEqual( - str(e.exception), "Authentication failed: Your organization does not have the required license to use SAML." 
+ str(e.exception), + "Authentication failed: Your organization does not have the required license to use SAML.", ) # Attempting to use SAML fails @@ -609,17 +681,25 @@ def test_cannot_use_saml_without_enterprise_license(self): _session.update({"saml_state": "ONELOGIN_87856a50b5490e643b1ebef9cb5bf6e78225a3c6"}) _session.save() - with open(os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), "r", encoding="utf_8") as f: + with open( + os.path.join(CURRENT_FOLDER, "fixtures/saml_login_response"), + "r", + encoding="utf_8", + ) as f: saml_response = f.read() with self.assertRaises(AuthFailed) as e: response = self.client.post( "/complete/saml/", - {"SAMLResponse": saml_response, "RelayState": str(self.organization_domain.id)}, + { + "SAMLResponse": saml_response, + "RelayState": str(self.organization_domain.id), + }, follow=True, format="multipart", ) self.assertEqual( - str(e.exception), "Authentication failed: Your organization does not have the required license to use SAML." + str(e.exception), + "Authentication failed: Your organization does not have the required license to use SAML.", ) diff --git a/ee/api/test/test_billing.py b/ee/api/test/test_billing.py index 0b5297e9d9261..87838d0b39dcc 100644 --- a/ee/api/test/test_billing.py +++ b/ee/api/test/test_billing.py @@ -13,7 +13,10 @@ from ee.api.test.base import APILicensedTest from ee.billing.billing_types import BillingPeriod, CustomerInfo, CustomerProduct from ee.models.license import License -from posthog.cloud_utils import TEST_clear_instance_license_cache, get_cached_instance_license +from posthog.cloud_utils import ( + TEST_clear_instance_license_cache, + get_cached_instance_license, +) from posthog.models.organization import OrganizationMembership from posthog.models.team import Team from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events @@ -34,9 +37,13 @@ def create_missing_billing_customer(**kwargs) -> CustomerInfo: current_total_amount_usd="0.00", products=None, 
billing_period=BillingPeriod( - current_period_start="2022-10-07T11:12:48", current_period_end="2022-11-07T11:12:48" + current_period_start="2022-10-07T11:12:48", + current_period_end="2022-11-07T11:12:48", ), - usage_summary={"events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}}, + usage_summary={ + "events": {"limit": None, "usage": 0}, + "recordings": {"limit": None, "usage": 0}, + }, free_trial_until=None, available_features=[], ) @@ -60,8 +67,16 @@ def create_billing_customer(**kwargs) -> CustomerInfo: image_url="https://posthog.com/static/images/product-os.png", free_allocation=10000, tiers=[ - {"unit_amount_usd": "0.00", "up_to": 1000000, "current_amount_usd": "0.00"}, - {"unit_amount_usd": "0.00045", "up_to": 2000000, "current_amount_usd": None}, + { + "unit_amount_usd": "0.00", + "up_to": 1000000, + "current_amount_usd": "0.00", + }, + { + "unit_amount_usd": "0.00045", + "up_to": 2000000, + "current_amount_usd": None, + }, ], tiered=True, unit_amount_usd="0.00", @@ -75,9 +90,13 @@ def create_billing_customer(**kwargs) -> CustomerInfo: ) ], billing_period=BillingPeriod( - current_period_start="2022-10-07T11:12:48", current_period_end="2022-11-07T11:12:48" + current_period_start="2022-10-07T11:12:48", + current_period_end="2022-11-07T11:12:48", ), - usage_summary={"events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}}, + usage_summary={ + "events": {"limit": None, "usage": 0}, + "recordings": {"limit": None, "usage": 0}, + }, free_trial_until=None, ) data.update(kwargs) @@ -95,8 +114,16 @@ def create_billing_products_response(**kwargs) -> Dict[str, List[CustomerProduct image_url="https://posthog.com/static/images/product-os.png", free_allocation=10000, tiers=[ - {"unit_amount_usd": "0.00", "up_to": 1000000, "current_amount_usd": "0.00"}, - {"unit_amount_usd": "0.00045", "up_to": 2000000, "current_amount_usd": None}, + { + "unit_amount_usd": "0.00", + "up_to": 1000000, + "current_amount_usd": "0.00", + }, 
+ { + "unit_amount_usd": "0.00045", + "up_to": 2000000, + "current_amount_usd": None, + }, ], tiered=True, unit_amount_usd="0.00", @@ -182,7 +209,11 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma secret = self.license.key.split("::")[1] decoded_token = jwt.decode( - token, secret, algorithms=["HS256"], audience="posthog:license-key", options={"verify_aud": True} + token, + secret, + algorithms=["HS256"], + audience="posthog:license-key", + options={"verify_aud": True}, ) assert decoded_token == { @@ -232,8 +263,16 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "image_url": "https://posthog.com/static/images/product-os.png", "free_allocation": 10000, "tiers": [ - {"unit_amount_usd": "0.00", "up_to": 1000000, "current_amount_usd": "0.00"}, - {"unit_amount_usd": "0.00045", "up_to": 2000000, "current_amount_usd": None}, + { + "unit_amount_usd": "0.00", + "up_to": 1000000, + "current_amount_usd": "0.00", + }, + { + "unit_amount_usd": "0.00045", + "up_to": 2000000, + "current_amount_usd": None, + }, ], "tiered": True, "current_amount_usd": "0.00", @@ -250,7 +289,10 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "current_period_start": "2022-10-07T11:12:48", "current_period_end": "2022-11-07T11:12:48", }, - "usage_summary": {"events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}}, + "usage_summary": { + "events": {"limit": None, "usage": 0}, + "recordings": {"limit": None, "usage": 0}, + }, "free_trial_until": None, } @@ -290,8 +332,16 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "type": "events", "free_allocation": 10000, "tiers": [ - {"unit_amount_usd": "0.00", "up_to": 1000000, "current_amount_usd": "0.00"}, - {"unit_amount_usd": "0.00045", "up_to": 2000000, "current_amount_usd": None}, + { + "unit_amount_usd": "0.00", + "up_to": 1000000, + "current_amount_usd": "0.00", + }, + { + 
"unit_amount_usd": "0.00045", + "up_to": 2000000, + "current_amount_usd": None, + }, ], "current_usage": 0, "percentage_usage": 0.0, @@ -310,7 +360,10 @@ def mock_implementation(url: str, headers: Any = None, params: Any = None) -> Ma "current_period_start": "2022-10-07T11:12:48", "current_period_end": "2022-11-07T11:12:48", }, - "usage_summary": {"events": {"limit": None, "usage": 0}, "recordings": {"limit": None, "usage": 0}}, + "usage_summary": { + "events": {"limit": None, "usage": 0}, + "recordings": {"limit": None, "usage": 0}, + }, "free_trial_until": None, "current_total_amount_usd": "0.00", "deactivated": False, diff --git a/ee/api/test/test_capture.py b/ee/api/test/test_capture.py index 27e907d7cbf62..891a9759a80c5 100644 --- a/ee/api/test/test_capture.py +++ b/ee/api/test/test_capture.py @@ -27,8 +27,20 @@ def test_produce_to_kafka(self, kafka_produce): { "data": json.dumps( [ - {"event": "event1", "properties": {"distinct_id": "id1", "token": self.team.api_token}}, - {"event": "event2", "properties": {"distinct_id": "id2", "token": self.team.api_token}}, + { + "event": "event1", + "properties": { + "distinct_id": "id1", + "token": self.team.api_token, + }, + }, + { + "event": "event2", + "properties": { + "distinct_id": "id2", + "token": self.team.api_token, + }, + }, ] ), "api_key": self.team.api_token, @@ -87,7 +99,10 @@ def test_capture_event_with_uuid_in_payload(self, kafka_produce): { "event": "event1", "uuid": "017d37c1-f285-0000-0e8b-e02d131925dc", - "properties": {"distinct_id": "id1", "token": self.team.api_token}, + "properties": { + "distinct_id": "id1", + "token": self.team.api_token, + }, } ] ), @@ -110,7 +125,15 @@ def test_kafka_connection_error(self, kafka_produce): "/capture/", { "data": json.dumps( - [{"event": "event1", "properties": {"distinct_id": "id1", "token": self.team.api_token}}] + [ + { + "event": "event1", + "properties": { + "distinct_id": "id1", + "token": self.team.api_token, + }, + } + ] ), "api_key": 
self.team.api_token, }, @@ -135,7 +158,15 @@ def test_partition_key_override(self, kafka_produce): "/capture/", { "data": json.dumps( - [{"event": "event1", "properties": {"distinct_id": "id1", "token": self.team.api_token}}] + [ + { + "event": "event1", + "properties": { + "distinct_id": "id1", + "token": self.team.api_token, + }, + } + ] ), "api_key": self.team.api_token, }, @@ -143,7 +174,10 @@ def test_partition_key_override(self, kafka_produce): # By default we use (the hash of) as the partition key kafka_produce_call = kafka_produce.call_args_list[0].kwargs - self.assertEqual(kafka_produce_call["key"], hashlib.sha256(default_partition_key.encode()).hexdigest()) + self.assertEqual( + kafka_produce_call["key"], + hashlib.sha256(default_partition_key.encode()).hexdigest(), + ) # Setting up an override via EVENT_PARTITION_KEYS_TO_OVERRIDE should cause us to pass None # as the key when producing to Kafka, leading to random partitioning @@ -152,7 +186,15 @@ def test_partition_key_override(self, kafka_produce): "/capture/", { "data": json.dumps( - [{"event": "event1", "properties": {"distinct_id": "id1", "token": self.team.api_token}}] + [ + { + "event": "event1", + "properties": { + "distinct_id": "id1", + "token": self.team.api_token, + }, + } + ] ), "api_key": self.team.api_token, }, diff --git a/ee/api/test/test_dashboard.py b/ee/api/test/test_dashboard.py index 58905c05f2257..cc0833991ba7f 100644 --- a/ee/api/test/test_dashboard.py +++ b/ee/api/test/test_dashboard.py @@ -147,7 +147,8 @@ def test_can_edit_restricted_dashboard_as_creator_who_is_project_member(self): ) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", {"name": "Gentle Antelope"} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", + {"name": "Gentle Antelope"}, ) response_data = response.json() @@ -173,13 +174,15 @@ def test_cannot_edit_restricted_dashboard_as_other_user_who_is_project_member(se ) response = self.client.patch( - 
f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", {"name": "Gentle Antelope"} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", + {"name": "Gentle Antelope"}, ) response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEquals( - response_data, self.permission_denied_response("You don't have edit permissions for this dashboard.") + response_data, + self.permission_denied_response("You don't have edit permissions for this dashboard."), ) def test_can_edit_restricted_dashboard_as_other_user_who_is_project_admin(self): @@ -194,7 +197,8 @@ def test_can_edit_restricted_dashboard_as_other_user_who_is_project_admin(self): ) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", {"name": "Gentle Antelope"} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", + {"name": "Gentle Antelope"}, ) response_data = response.json() @@ -228,12 +232,15 @@ def test_dashboard_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team=self.team, name="Edit-restricted dashboard", created_by=self.user) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", {"tags": ["a", "b", "a"]} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", + {"tags": ["a", "b", "a"]}, ) self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) @@ -250,14 +257,16 @@ def test_sharing_edits_limited_to_collaborators(self): ) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", + {"enabled": True}, ) response_data 
= response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEquals( - response_data, self.permission_denied_response("You don't have edit permissions for this dashboard.") + response_data, + self.permission_denied_response("You don't have edit permissions for this dashboard."), ) def test_cannot_edit_dashboard_description_when_collaboration_not_available(self): @@ -281,7 +290,10 @@ def test_cannot_edit_dashboard_description_when_collaboration_not_available(self response = self.client.patch( f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", - {"description": "i should not be allowed to edit this", "name": "even though I am allowed to edit this"}, + { + "description": "i should not be allowed to edit this", + "name": "even though I am allowed to edit this", + }, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -311,7 +323,10 @@ def test_can_edit_dashboard_description_when_collaboration_is_available(self): response = self.client.patch( f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", - {"description": "i should be allowed to edit this", "name": "and so also to edit this"}, + { + "description": "i should be allowed to edit this", + "name": "and so also to edit this", + }, ) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/ee/api/test/test_dashboard_collaborators.py b/ee/api/test/test_dashboard_collaborators.py index 3f7b71663f290..c7be5f7cf5276 100644 --- a/ee/api/test/test_dashboard_collaborators.py +++ b/ee/api/test/test_dashboard_collaborators.py @@ -21,10 +21,14 @@ def test_list_collaborators_as_person_without_edit_access(self): other_user_a = User.objects.create_and_join(self.organization, "a@x.com", None) other_user_b = User.objects.create_and_join(self.organization, "b@x.com", None) DashboardPrivilege.objects.create( - user=other_user_a, dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_VIEW + user=other_user_a, + dashboard=self.test_dashboard, + 
level=Dashboard.PrivilegeLevel.CAN_VIEW, ) DashboardPrivilege.objects.create( - user=other_user_b, dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_EDIT + user=other_user_b, + dashboard=self.test_dashboard, + level=Dashboard.PrivilegeLevel.CAN_EDIT, ) response = self.client.get( @@ -48,7 +52,10 @@ def test_cannot_add_collaborator_to_unrestricted_dashboard_as_creator(self): response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(other_user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(other_user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) response_data = response.json() @@ -67,7 +74,10 @@ def test_can_add_collaborator_to_edit_restricted_dashboard_as_creator(self): response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(other_user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(other_user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) response_data = response.json() @@ -84,7 +94,10 @@ def test_cannot_add_yourself_to_restricted_dashboard_as_creator(self): response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(self.user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(self.user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) response_data = response.json() @@ -106,13 +119,17 @@ def test_cannot_add_collaborator_to_edit_restricted_dashboard_as_other_user(self response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(other_user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(other_user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) 
response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response_data, self.permission_denied_response("You don't have edit permissions for this dashboard.") + response_data, + self.permission_denied_response("You don't have edit permissions for this dashboard."), ) def test_cannot_add_collaborator_from_other_org_to_edit_restricted_dashboard_as_creator(self): @@ -124,7 +141,10 @@ def test_cannot_add_collaborator_from_other_org_to_edit_restricted_dashboard_as_ response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(other_user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(other_user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) response_data = response.json() @@ -144,12 +164,18 @@ def test_cannot_add_collaborator_to_other_org_to_edit_restricted_dashboard_as_cr response = self.client.post( f"/api/projects/{self.test_dashboard.team_id}/dashboards/{self.test_dashboard.id}/collaborators/", - {"user_uuid": str(other_user.uuid), "level": Dashboard.PrivilegeLevel.CAN_EDIT}, + { + "user_uuid": str(other_user.uuid), + "level": Dashboard.PrivilegeLevel.CAN_EDIT, + }, ) response_data = response.json() self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(response_data, self.permission_denied_response("You don't have access to the project.")) + self.assertEqual( + response_data, + self.permission_denied_response("You don't have access to the project."), + ) def test_cannot_update_existing_collaborator(self): # This will change once there are more levels, but with just two it doesn't make sense to PATCH privileges @@ -159,7 +185,9 @@ def test_cannot_update_existing_collaborator(self): self.test_dashboard.save() other_user = User.objects.create_and_join(self.organization, "a@x.com", None) DashboardPrivilege.objects.create( - user=other_user, 
dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_EDIT + user=other_user, + dashboard=self.test_dashboard, + level=Dashboard.PrivilegeLevel.CAN_EDIT, ) response = self.client.patch( @@ -176,7 +204,9 @@ def test_cannot_remove_collaborator_from_unrestricted_dashboard_as_creator(self) self.test_dashboard.save() other_user = User.objects.create_and_join(self.organization, "a@x.com", None) DashboardPrivilege.objects.create( - user=other_user, dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_EDIT + user=other_user, + dashboard=self.test_dashboard, + level=Dashboard.PrivilegeLevel.CAN_EDIT, ) response = self.client.delete( @@ -199,7 +229,9 @@ def test_can_remove_collaborator_from_restricted_dashboard_as_creator(self): self.test_dashboard.save() other_user = User.objects.create_and_join(self.organization, "a@x.com", None) DashboardPrivilege.objects.create( - user=other_user, dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_EDIT + user=other_user, + dashboard=self.test_dashboard, + level=Dashboard.PrivilegeLevel.CAN_EDIT, ) response = self.client.delete( @@ -216,7 +248,9 @@ def test_cannot_remove_collaborator_from_restricted_dashboard_as_other_user(self self.test_dashboard.save() other_user = User.objects.create_and_join(self.organization, "a@x.com", None) DashboardPrivilege.objects.create( - user=other_user, dashboard=self.test_dashboard, level=Dashboard.PrivilegeLevel.CAN_EDIT + user=other_user, + dashboard=self.test_dashboard, + level=Dashboard.PrivilegeLevel.CAN_EDIT, ) response = self.client.delete( @@ -226,5 +260,6 @@ def test_cannot_remove_collaborator_from_restricted_dashboard_as_other_user(self self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response_data, self.permission_denied_response("You don't have edit permissions for this dashboard.") + response_data, + self.permission_denied_response("You don't have edit permissions for this dashboard."), ) diff --git 
a/ee/api/test/test_debug_ch_queries.py b/ee/api/test/test_debug_ch_queries.py index d16c05fb79c32..f7c785c4e1aa1 100644 --- a/ee/api/test/test_debug_ch_queries.py +++ b/ee/api/test/test_debug_ch_queries.py @@ -9,7 +9,6 @@ class TestProjectEnterpriseAPI(APIBaseTest): CLASS_DATA_LEVEL_SETUP = False def test_denied(self): - with patch("ee.api.debug_ch_queries.is_cloud", return_value=True): with patch("ee.api.debug_ch_queries.DEBUG", True): resp = self.client.get("/api/debug_ch_queries/") diff --git a/ee/api/test/test_event_definition.py b/ee/api/test/test_event_definition.py index 7eb807caa33d1..6e3cbb8775fb9 100644 --- a/ee/api/test/test_event_definition.py +++ b/ee/api/test/test_event_definition.py @@ -104,7 +104,9 @@ def test_retrieve_existing_event_definition(self): self.assertEqual(response_data["owner"]["id"], self.user.id) self.assertAlmostEqual( - (timezone.now() - dateutil.parser.isoparse(response_data["created_at"])).total_seconds(), 0, delta=1 + (timezone.now() - dateutil.parser.isoparse(response_data["created_at"])).total_seconds(), + 0, + delta=1, ) self.assertIn("last_seen_at", response_data) @@ -139,7 +141,8 @@ def test_search_event_definition(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual( - sorted([r["name"] for r in response_data["results"]]), ["entered_free_trial", "enterprise event"] + sorted([r["name"] for r in response_data["results"]]), + ["entered_free_trial", "enterprise event"], ) self.assertEqual(response_data["results"][1]["name"], "enterprise event") @@ -156,7 +159,8 @@ def test_search_event_definition(self): self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual( - sorted([r["name"] for r in response_data["results"]]), ["$pageview", "enterprise event", "regular event"] + sorted([r["name"] for r in response_data["results"]]), + ["$pageview", "enterprise event", "regular event"], ) response = 
self.client.get(f"/api/projects/@current/event_definitions/?search=bust") @@ -180,7 +184,10 @@ def test_update_event_definition(self): event.refresh_from_db() self.assertEqual(event.description, "This is a description.") - self.assertEqual(set(event.tagged_items.values_list("tag__name", flat=True)), {"official", "internal"}) + self.assertEqual( + set(event.tagged_items.values_list("tag__name", flat=True)), + {"official", "internal"}, + ) activity_log: Optional[ActivityLog] = ActivityLog.objects.first() assert activity_log is not None @@ -208,10 +215,14 @@ def test_update_event_definition(self): def test_update_event_without_license(self): event = EnterpriseEventDefinition.objects.create(team=self.demo_team, name="enterprise event") response = self.client.patch( - f"/api/projects/@current/event_definitions/{str(event.id)}", data={"description": "test"} + f"/api/projects/@current/event_definitions/{str(event.id)}", + data={"description": "test"}, ) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + response.json()["detail"], + ) def test_with_expired_license(self): super(LicenseManager, cast(LicenseManager, License.objects)).create( @@ -219,10 +230,14 @@ def test_with_expired_license(self): ) event = EnterpriseEventDefinition.objects.create(team=self.demo_team, name="description test") response = self.client.patch( - f"/api/projects/@current/event_definitions/{str(event.id)}", data={"description": "test"} + f"/api/projects/@current/event_definitions/{str(event.id)}", + data={"description": "test"}, ) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + 
response.json()["detail"], + ) def test_can_get_event_verification_data(self): super(LicenseManager, cast(LicenseManager, License.objects)).create( @@ -297,7 +312,10 @@ def test_verify_then_verify_again_no_change(self): assert response.json()["updated_at"] == "2020-01-02T00:00:00Z" with freeze_time("2020-01-02T00:01:00Z"): - self.client.patch(f"/api/projects/@current/event_definitions/{event.id}", {"verified": True}) + self.client.patch( + f"/api/projects/@current/event_definitions/{event.id}", + {"verified": True}, + ) response = self.client.get(f"/api/projects/@current/event_definitions/{event.id}") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -338,11 +356,14 @@ def test_event_definition_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) event = EnterpriseEventDefinition.objects.create(team=self.demo_team, name="enterprise event") response = self.client.patch( - f"/api/projects/@current/event_definitions/{str(event.id)}", data={"tags": ["a", "b", "a"]} + f"/api/projects/@current/event_definitions/{str(event.id)}", + data={"tags": ["a", "b", "a"]}, ) self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) diff --git a/ee/api/test/test_feature_flag_role_access.py b/ee/api/test/test_feature_flag_role_access.py index ad3286f4b0f91..f143f10505f0f 100644 --- a/ee/api/test/test_feature_flag_role_access.py +++ b/ee/api/test/test_feature_flag_role_access.py @@ -15,7 +15,10 @@ def setUp(self): self.eng_role = Role.objects.create(name="Engineering", organization=self.organization) self.marketing_role = Role.objects.create(name="Marketing", organization=self.organization) self.feature_flag = FeatureFlag.objects.create( - created_by=self.user, team=self.team, 
key="flag_role_access", name="Flag role access" + created_by=self.user, + team=self.team, + key="flag_role_access", + name="Flag role access", ) def test_can_always_add_role_access_if_creator_of_feature_flag(self): @@ -49,7 +52,10 @@ def test_cannot_add_role_access_if_feature_flags_access_level_too_low_and_not_cr ) response_data = res.json() self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN) - self.assertEqual(response_data, self.permission_denied_response("You can't edit roles for this feature flag.")) + self.assertEqual( + response_data, + self.permission_denied_response("You can't edit roles for this feature flag."), + ) def test_can_add_role_access_if_role_feature_flags_access_level_allows(self): OrganizationResourceAccess.objects.create( @@ -61,7 +67,8 @@ def test_can_add_role_access_if_role_feature_flags_access_level_allows(self): self.organization.save() self.organization_membership.save() self.client.post( - f"/api/organizations/@current/roles/{self.eng_role.id}/role_memberships", {"user_uuid": self.user.uuid} + f"/api/organizations/@current/roles/{self.eng_role.id}/role_memberships", + {"user_uuid": self.user.uuid}, ) self.assertEqual( self.user.role_memberships.first().role.feature_flags_access_level, # type: ignore @@ -96,7 +103,8 @@ def test_feature_flag_permission_changes(self): # Add role membership and feature flag access level self.client.post( - f"/api/organizations/@current/roles/{self.eng_role.id}/role_memberships", {"user_uuid": self.user.uuid} + f"/api/organizations/@current/roles/{self.eng_role.id}/role_memberships", + {"user_uuid": self.user.uuid}, ) self.client.post( @@ -137,5 +145,6 @@ def test_can_always_edit_if_admin_or_higher(self): ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("You can't edit roles for this feature flag.") + response.json(), + self.permission_denied_response("You can't edit roles for this feature flag."), ) diff --git 
a/ee/api/test/test_hooks.py b/ee/api/test/test_hooks.py index f94f3165ebddd..e49f3dbd17126 100644 --- a/ee/api/test/test_hooks.py +++ b/ee/api/test/test_hooks.py @@ -30,7 +30,11 @@ def test_create_hook(self): ) def test_create_hook_with_resource_id(self): - data = {"target": "https://hooks.zapier.com/abcd/", "event": "action_performed", "resource_id": "66"} + data = { + "target": "https://hooks.zapier.com/abcd/", + "event": "action_performed", + "resource_id": "66", + } response = self.client.post(f"/api/projects/{self.team.id}/hooks/", data) response_data = response.json() @@ -58,13 +62,15 @@ def test_delete_hook(self): self.assertEqual(response.status_code, 204) def test_invalid_target(self): - data = {"target": "https://hooks.non-zapier.com/abcd/", "event": "action_performed"} + data = { + "target": "https://hooks.non-zapier.com/abcd/", + "event": "action_performed", + } response = self.client.post(f"/api/projects/{self.team.id}/hooks/", data) self.assertEqual(response.status_code, 400) def test_valid_domain() -> None: - test_cases = { "http://hooks.zapier.com": True, "https://hooks.zapier.com": True, diff --git a/ee/api/test/test_insight.py b/ee/api/test/test_insight.py index b8a64853b1bcd..00863551500ee 100644 --- a/ee/api/test/test_insight.py +++ b/ee/api/test/test_insight.py @@ -9,7 +9,13 @@ from ee.api.test.base import APILicensedTest from ee.models import ExplicitTeamMembership, DashboardPrivilege from posthog.api.test.dashboards import DashboardAPI -from posthog.models import Dashboard, DashboardTile, Insight, OrganizationMembership, User +from posthog.models import ( + Dashboard, + DashboardTile, + Insight, + OrganizationMembership, + User, +) from posthog.test.base import FuzzyInt, snapshot_postgres_queries from posthog.test.db_context_capturing import capture_db_queries @@ -311,13 +317,18 @@ def test_event_definition_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, 
License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team=self.team, name="Edit-restricted dashboard") insight = Insight.objects.create(team=self.team, name="XYZ", created_by=self.user) DashboardTile.objects.create(dashboard=dashboard, insight=insight) - response = self.client.patch(f"/api/projects/{self.team.id}/insights/{insight.id}", {"tags": ["a", "b", "a"]}) + response = self.client.patch( + f"/api/projects/{self.team.id}/insights/{insight.id}", + {"tags": ["a", "b", "a"]}, + ) self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) @@ -565,7 +576,13 @@ def test_listing_insights_does_not_nplus1(self) -> None: # adding more insights doesn't change the query count self.assertEqual( - [FuzzyInt(11, 12), FuzzyInt(11, 12), FuzzyInt(11, 12), FuzzyInt(11, 12), FuzzyInt(11, 12)], + [ + FuzzyInt(11, 12), + FuzzyInt(11, 12), + FuzzyInt(11, 12), + FuzzyInt(11, 12), + FuzzyInt(11, 12), + ], query_counts, f"received query counts\n\n{query_counts}", ) diff --git a/ee/api/test/test_instance_settings.py b/ee/api/test/test_instance_settings.py index de391bfc923a7..4177eb167a3e0 100644 --- a/ee/api/test/test_instance_settings.py +++ b/ee/api/test/test_instance_settings.py @@ -5,7 +5,9 @@ from posthog.client import sync_execute from posthog.models.instance_setting import get_instance_setting from posthog.models.performance.sql import PERFORMANCE_EVENT_DATA_TABLE -from posthog.session_recordings.sql.session_recording_event_sql import SESSION_RECORDING_EVENTS_DATA_TABLE +from posthog.session_recordings.sql.session_recording_event_sql import ( + SESSION_RECORDING_EVENTS_DATA_TABLE, +) from posthog.settings.data_stores import CLICKHOUSE_DATABASE from posthog.test.base import ClickhouseTestMixin, snapshot_clickhouse_alter_queries @@ -30,7 +32,10 @@ def 
test_update_recordings_ttl_setting(self): table_engine = sync_execute( "SELECT engine_full FROM system.tables WHERE database = %(database)s AND name = %(table)s", - {"database": CLICKHOUSE_DATABASE, "table": SESSION_RECORDING_EVENTS_DATA_TABLE()}, + { + "database": CLICKHOUSE_DATABASE, + "table": SESSION_RECORDING_EVENTS_DATA_TABLE(), + }, ) self.assertIn("TTL toDate(created_at) + toIntervalWeek(5)", table_engine[0][0]) @@ -40,7 +45,10 @@ def test_update_recordings_performance_events_ttl_setting(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["value"], 3) - response = self.client.patch(f"/api/instance_settings/RECORDINGS_PERFORMANCE_EVENTS_TTL_WEEKS", {"value": 5}) + response = self.client.patch( + f"/api/instance_settings/RECORDINGS_PERFORMANCE_EVENTS_TTL_WEEKS", + {"value": 5}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["value"], 5) diff --git a/ee/api/test/test_integration.py b/ee/api/test/test_integration.py index 16a95c8bc3646..d675415e4bd81 100644 --- a/ee/api/test/test_integration.py +++ b/ee/api/test/test_integration.py @@ -25,11 +25,16 @@ def _headers_for_payload(self, payload: Any): signature = ( "v0=" + hmac.new( - "not-so-secret".encode("utf-8"), sig_basestring.encode("utf-8"), digestmod=hashlib.sha256 + "not-so-secret".encode("utf-8"), + sig_basestring.encode("utf-8"), + digestmod=hashlib.sha256, ).hexdigest() ) - return {"HTTP_X_SLACK_SIGNATURE": signature, "HTTP_X_SLACK_REQUEST_TIMESTAMP": str(slack_time)} + return { + "HTTP_X_SLACK_SIGNATURE": signature, + "HTTP_X_SLACK_REQUEST_TIMESTAMP": str(slack_time), + } def test_validates_payload(self): body = {"type": "url_verification", "challenge": "to-a-duel!"} diff --git a/ee/api/test/test_license.py b/ee/api/test/test_license.py index 813035def9546..46199a2ab3138 100644 --- a/ee/api/test/test_license.py +++ b/ee/api/test/test_license.py @@ -75,7 +75,12 @@ def 
test_friendly_error_when_license_key_is_invalid(self, patch_post): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), - {"type": "license_error", "code": "invalid_key", "detail": "Provided key is invalid.", "attr": None}, + { + "type": "license_error", + "code": "invalid_key", + "detail": "Provided key is invalid.", + "attr": None, + }, ) self.assertEqual(License.objects.count(), count) @@ -84,11 +89,15 @@ def test_friendly_error_when_license_key_is_invalid(self, patch_post): def test_highest_activated_license_is_used_after_upgrade(self): with freeze_time("2022-06-01T12:00:00.000Z"): License.objects.create( - key="old", plan="scale", valid_until=timezone.datetime.now() + timezone.timedelta(days=30) + key="old", + plan="scale", + valid_until=timezone.datetime.now() + timezone.timedelta(days=30), ) with freeze_time("2022-06-03T12:00:00.000Z"): License.objects.create( - key="new", plan="enterprise", valid_until=timezone.datetime.now() + timezone.timedelta(days=30) + key="new", + plan="enterprise", + valid_until=timezone.datetime.now() + timezone.timedelta(days=30), ) with freeze_time("2022-06-03T13:00:00.000Z"): @@ -101,11 +110,15 @@ def test_highest_activated_license_is_used_after_upgrade(self): def test_highest_activated_license_is_used_after_renewal_to_lower(self): with freeze_time("2022-06-01T12:00:00.000Z"): License.objects.create( - key="new", plan="enterprise", valid_until=timezone.datetime.now() + timezone.timedelta(days=30) + key="new", + plan="enterprise", + valid_until=timezone.datetime.now() + timezone.timedelta(days=30), ) with freeze_time("2022-06-27T12:00:00.000Z"): License.objects.create( - key="old", plan="scale", valid_until=timezone.datetime.now() + timezone.timedelta(days=30) + key="old", + plan="scale", + valid_until=timezone.datetime.now() + timezone.timedelta(days=30), ) with freeze_time("2022-06-27T13:00:00.000Z"): @@ -125,7 +138,14 @@ def test_can_cancel_license(self, patch_post): 
self.assertEqual(Team.objects.count(), 4) self.assertEqual( sorted([team.id for team in Team.objects.all()]), - sorted([self.team.pk, to_be_deleted.pk, not_to_be_deleted.pk, from_another_organisation.pk]), + sorted( + [ + self.team.pk, + to_be_deleted.pk, + not_to_be_deleted.pk, + from_another_organisation.pk, + ] + ), ) mock = Mock() @@ -134,7 +154,10 @@ def test_can_cancel_license(self, patch_post): response = self.client.delete(f"/api/license/{self.license.pk}/") self.assertEqual(response.status_code, status.HTTP_200_OK, response.content) self.assertEqual(Team.objects.count(), 2) # deleted two teams - self.assertEqual(sorted([team.id for team in Team.objects.all()]), sorted([self.team.pk, not_to_be_deleted.pk])) + self.assertEqual( + sorted([team.id for team in Team.objects.all()]), + sorted([self.team.pk, not_to_be_deleted.pk]), + ) self.assertEqual(Organization.objects.count(), 1) @pytest.mark.skip_on_multitenancy diff --git a/ee/api/test/test_organization.py b/ee/api/test/test_organization.py index 794ea986d383a..2f1b11bb95256 100644 --- a/ee/api/test/test_organization.py +++ b/ee/api/test/test_organization.py @@ -19,7 +19,10 @@ def test_create_organization(self): self.assertEqual(Organization.objects.count(), 2) response_data = response.json() self.assertEqual(response_data.get("name"), "Test") - self.assertEqual(OrganizationMembership.objects.filter(organization_id=response_data.get("id")).count(), 1) + self.assertEqual( + OrganizationMembership.objects.filter(organization_id=response_data.get("id")).count(), + 1, + ) self.assertEqual( OrganizationMembership.objects.get(organization_id=response_data.get("id"), user=self.user).level, OrganizationMembership.Level.OWNER, @@ -28,7 +31,10 @@ def test_create_organization(self): def test_create_two_similarly_named_organizations(self): random.seed(0) - response = self.client.post("/api/organizations/", {"name": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}) + response = self.client.post( + 
"/api/organizations/", + {"name": "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"}, + ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertDictContainsSubset( { @@ -39,7 +45,8 @@ def test_create_two_similarly_named_organizations(self): ) response = self.client.post( - "/api/organizations/", {"name": "#XXxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxX"} + "/api/organizations/", + {"name": "#XXxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxX"}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertDictContainsSubset( @@ -81,13 +88,21 @@ def test_delete_last_organization(self, mock_capture): response = self.client.delete(f"/api/organizations/{org_id}") - self.assertEqual(response.status_code, 204, "Did not successfully delete last organization on the instance") + self.assertEqual( + response.status_code, + 204, + "Did not successfully delete last organization on the instance", + ) self.assertFalse(Organization.objects.filter(id=org_id).exists()) self.assertFalse(Organization.objects.exists()) response_bis = self.client.delete(f"/api/organizations/{org_id}") - self.assertEqual(response_bis.status_code, 404, "Did not return a 404 on trying to delete a nonexistent org") + self.assertEqual( + response_bis.status_code, + 404, + "Did not return a 404 on trying to delete a nonexistent org", + ) mock_capture.assert_called_once_with( self.user.distinct_id, @@ -97,7 +112,10 @@ def test_delete_last_organization(self, mock_capture): ) def test_no_delete_organization_not_owning(self): - for level in (OrganizationMembership.Level.MEMBER, OrganizationMembership.Level.ADMIN): + for level in ( + OrganizationMembership.Level.MEMBER, + OrganizationMembership.Level.ADMIN, + ): self.organization_membership.level = level self.organization_membership.save() response = self.client.delete(f"/api/organizations/{self.organization.id}") @@ -126,7 +144,10 @@ def test_delete_organization_owning(self): potential_err_message = f"Somehow did not delete 
the org as the owner" self.assertEqual(response.status_code, 204, potential_err_message) - self.assertFalse(Organization.objects.filter(id=self.organization.id).exists(), potential_err_message) + self.assertFalse( + Organization.objects.filter(id=self.organization.id).exists(), + potential_err_message, + ) self.assertFalse(OrganizationMembership.objects.filter(id__in=membership_ids).exists()) self.assertTrue(User.objects.filter(id=self.user.pk).exists()) @@ -139,11 +160,19 @@ def test_no_delete_organization_not_belonging_to(self): potential_err_message = f"Somehow managed to delete someone else's org as a level {level} in own org" self.assertEqual( response.json(), - {"attr": None, "detail": "Not found.", "code": "not_found", "type": "invalid_request"}, + { + "attr": None, + "detail": "Not found.", + "code": "not_found", + "type": "invalid_request", + }, potential_err_message, ) self.assertEqual(response.status_code, 404, potential_err_message) - self.assertTrue(Organization.objects.filter(id=organization.id).exists(), potential_err_message) + self.assertTrue( + Organization.objects.filter(id=organization.id).exists(), + potential_err_message, + ) def test_update_org(self): for level in OrganizationMembership.Level: @@ -151,7 +180,8 @@ def test_update_org(self): self.organization_membership.save() response_rename = self.client.patch(f"/api/organizations/{self.organization.id}", {"name": "Woof"}) response_email = self.client.patch( - f"/api/organizations/{self.organization.id}", {"is_member_join_email_enabled": False} + f"/api/organizations/{self.organization.id}", + {"is_member_join_email_enabled": False}, ) self.organization.refresh_from_db() @@ -183,7 +213,12 @@ def test_no_update_organization_not_belonging_to(self): potential_err_message = f"Somehow managed to update someone else's org as a level {level} in own org" self.assertEqual( response.json(), - {"attr": None, "detail": "Not found.", "code": "not_found", "type": "invalid_request"}, + { + "attr": None, + 
"detail": "Not found.", + "code": "not_found", + "type": "invalid_request", + }, potential_err_message, ) self.assertEqual(response.status_code, 404, potential_err_message) @@ -194,7 +229,11 @@ def test_feature_available_self_hosted_has_license(self): current_plans = License.PLANS License.PLANS = {"enterprise": ["whatever"]} # type: ignore with self.is_cloud(False): - License.objects.create(key="key", plan="enterprise", valid_until=dt.datetime.now() + dt.timedelta(days=1)) + License.objects.create( + key="key", + plan="enterprise", + valid_until=dt.datetime.now() + dt.timedelta(days=1), + ) # Still only old, empty available_features field value known self.assertFalse(self.organization.is_feature_available("whatever")) diff --git a/ee/api/test/test_organization_resource_access.py b/ee/api/test/test_organization_resource_access.py index 5607c802d571b..9123214a092db 100644 --- a/ee/api/test/test_organization_resource_access.py +++ b/ee/api/test/test_organization_resource_access.py @@ -23,7 +23,10 @@ def test_only_organization_admins_and_higher_can_set_resource_access(self): self.assertEqual(admin_create_res.status_code, status.HTTP_201_CREATED) get_res = self.client.get("/api/organizations/@current/resource_access") self.assertEqual(get_res.json()["count"], 1) - self.assertEqual(get_res.json()["results"][0]["resource"], OrganizationResourceAccess.Resources.FEATURE_FLAGS) + self.assertEqual( + get_res.json()["results"][0]["resource"], + OrganizationResourceAccess.Resources.FEATURE_FLAGS, + ) self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() @@ -75,13 +78,18 @@ def test_can_only_create_one_instance_of_each_resource_type(self): self.assertEqual(create_exp_resource_access.status_code, status.HTTP_201_CREATED) other_org = Organization.objects.create(name="other org") OrganizationResourceAccess.objects.create( - resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, organization=other_org + 
resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, + organization=other_org, ) self.assertEqual(OrganizationResourceAccess.objects.count(), 3) - self.assertEqual(OrganizationResourceAccess.objects.filter(organization=other_org).exists(), True) + self.assertEqual( + OrganizationResourceAccess.objects.filter(organization=other_org).exists(), + True, + ) with self.assertRaises(IntegrityError): OrganizationResourceAccess.objects.create( - resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, organization=self.organization + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, + organization=self.organization, ) def test_can_change_access_levels_for_resources(self): @@ -97,7 +105,10 @@ def test_can_change_access_levels_for_resources(self): resource_id = create_res.json()["id"] get_res = self.client.get(f"/api/organizations/@current/resource_access/{resource_id}") - self.assertEqual(get_res.json()["access_level"], OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT) + self.assertEqual( + get_res.json()["access_level"], + OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT, + ) change_access_level = self.client.patch( f"/api/organizations/@current/resource_access/{resource_id}", @@ -106,7 +117,10 @@ def test_can_change_access_levels_for_resources(self): self.assertEqual(change_access_level.status_code, status.HTTP_200_OK) get_updated_res = self.client.get(f"/api/organizations/@current/resource_access/{resource_id}") - self.assertEqual(get_updated_res.json()["access_level"], OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW) + self.assertEqual( + get_updated_res.json()["access_level"], + OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW, + ) def test_default_edit_access_level_for_non_existing_resources(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN @@ -148,7 +162,8 @@ def test_returns_correct_results_by_organization(self): ) other_org = Organization.objects.create(name="other org") 
OrganizationResourceAccess.objects.create( - resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, organization=other_org + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, + organization=other_org, ) self.assertEqual(OrganizationResourceAccess.objects.count(), 3) res = self.client.get("/api/organizations/@current/resource_access") @@ -159,7 +174,8 @@ def test_returns_correct_results_by_organization(self): @snapshot_postgres_queries def test_list_organization_resource_access_is_not_nplus1(self): OrganizationResourceAccess.objects.create( - resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, organization=self.organization + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, + organization=self.organization, ) with self.assertNumQueries(9): @@ -167,7 +183,8 @@ def test_list_organization_resource_access_is_not_nplus1(self): assert len(response.json()["results"]) == 1 OrganizationResourceAccess.objects.create( - resource=OrganizationResourceAccess.Resources.EXPERIMENTS, organization=self.organization + resource=OrganizationResourceAccess.Resources.EXPERIMENTS, + organization=self.organization, ) # one query less because rate limit instance setting was cached on last API call... maybe? sometimes? 
diff --git a/ee/api/test/test_property_definition.py b/ee/api/test/test_property_definition.py index 15866393773e8..ef8d4dd928540 100644 --- a/ee/api/test/test_property_definition.py +++ b/ee/api/test/test_property_definition.py @@ -30,7 +30,9 @@ def test_can_set_and_query_property_type_and_format(self): def test_errors_on_invalid_property_type(self): with pytest.raises(IntegrityError): EnterprisePropertyDefinition.objects.create( - team=self.team, name="a timestamp", property_type="not an allowed option" + team=self.team, + name="a timestamp", + property_type="not an allowed option", ) def test_retrieve_existing_property_definition(self): @@ -147,7 +149,10 @@ def test_update_property_definition(self): self.assertEqual(set(response_data["tags"]), {"official", "internal"}) property.refresh_from_db() - self.assertEqual(set(property.tagged_items.values_list("tag__name", flat=True)), {"official", "internal"}) + self.assertEqual( + set(property.tagged_items.values_list("tag__name", flat=True)), + {"official", "internal"}, + ) activity_log: Optional[ActivityLog] = ActivityLog.objects.first() assert activity_log is not None @@ -181,7 +186,8 @@ def test_update_property_definition_property_type(self): property = PropertyDefinition.objects.create(team=self.team, name="property") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", {"property_type": "Numeric"} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + {"property_type": "Numeric"}, ) response_data = response.json() @@ -199,7 +205,8 @@ def test_update_property_definition_non_numeric(self): ) response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", {"property_type": "DateTime"} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + {"property_type": "DateTime"}, ) response_data = response.json() @@ -210,23 +217,32 @@ def test_update_property_definition_non_numeric(self): def 
test_update_property_description_without_license(self): property = EnterprisePropertyDefinition.objects.create(team=self.team, name="enterprise property") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", data={"description": "test"} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + data={"description": "test"}, ) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + response.json()["detail"], + ) def test_update_property_tags_without_license(self): property = EnterprisePropertyDefinition.objects.create(team=self.team, name="enterprise property") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", data={"tags": ["test"]} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + data={"tags": ["test"]}, ) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + response.json()["detail"], + ) def test_can_update_property_type_without_license(self): property = EnterprisePropertyDefinition.objects.create(team=self.team, name="enterprise property") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", data={"property_type": "DateTime"} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + data={"property_type": "DateTime"}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() @@ -256,7 +272,10 @@ def test_cannot_update_more_than_property_type_without_license(self): data={"property_type": "DateTime", "tags": ["test"]}, ) 
self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + response.json()["detail"], + ) def test_with_expired_license(self): super(LicenseManager, cast(LicenseManager, License.objects)).create( @@ -264,10 +283,14 @@ def test_with_expired_license(self): ) property = EnterprisePropertyDefinition.objects.create(team=self.team, name="description test") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", data={"description": "test"} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + data={"description": "test"}, ) self.assertEqual(response.status_code, status.HTTP_402_PAYMENT_REQUIRED) - self.assertIn("Self-hosted licenses are no longer available for purchase.", response.json()["detail"]) + self.assertIn( + "Self-hosted licenses are no longer available for purchase.", + response.json()["detail"], + ) def test_filter_property_definitions(self): super(LicenseManager, cast(LicenseManager, License.objects)).create( @@ -288,11 +311,14 @@ def test_event_property_definition_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) property = EnterprisePropertyDefinition.objects.create(team=self.team, name="description test") response = self.client.patch( - f"/api/projects/@current/property_definitions/{str(property.id)}/", data={"tags": ["a", "b", "a"]} + f"/api/projects/@current/property_definitions/{str(property.id)}/", + data={"tags": ["a", "b", "a"]}, ) self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) @@ -329,7 +355,10 
@@ def test_verify_then_unverify(self): assert response.json()["verified_at"] is None # Verify the event - self.client.patch(f"/api/projects/@current/property_definitions/{event.id}", {"verified": True}) + self.client.patch( + f"/api/projects/@current/property_definitions/{event.id}", + {"verified": True}, + ) response = self.client.get(f"/api/projects/@current/property_definitions/{event.id}") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -338,7 +367,10 @@ def test_verify_then_unverify(self): assert response.json()["verified_at"] == "2021-08-25T22:09:14.252000Z" # Unverify the event - self.client.patch(f"/api/projects/@current/property_definitions/{event.id}", {"verified": False}) + self.client.patch( + f"/api/projects/@current/property_definitions/{event.id}", + {"verified": False}, + ) response = self.client.get(f"/api/projects/@current/property_definitions/{event.id}") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -359,7 +391,10 @@ def test_verify_then_verify_again_no_change(self): assert response.json()["verified_at"] is None with freeze_time("2021-08-25T22:09:14.252Z"): - self.client.patch(f"/api/projects/@current/property_definitions/{event.id}", {"verified": True}) + self.client.patch( + f"/api/projects/@current/property_definitions/{event.id}", + {"verified": True}, + ) response = self.client.get(f"/api/projects/@current/property_definitions/{event.id}") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -369,7 +404,10 @@ def test_verify_then_verify_again_no_change(self): assert response.json()["updated_at"] == "2021-08-25T22:09:14.252000Z" with freeze_time("2021-10-26T22:09:14.252Z"): - self.client.patch(f"/api/projects/@current/property_definitions/{event.id}", {"verified": True}) + self.client.patch( + f"/api/projects/@current/property_definitions/{event.id}", + {"verified": True}, + ) response = self.client.get(f"/api/projects/@current/property_definitions/{event.id}") self.assertEqual(response.status_code, 
status.HTTP_200_OK) diff --git a/ee/api/test/test_role.py b/ee/api/test/test_role.py index be1c315945de7..1a3068ff4cf4f 100644 --- a/ee/api/test/test_role.py +++ b/ee/api/test/test_role.py @@ -90,13 +90,17 @@ def test_updating_feature_flags_access_level_for_a_role(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() self.assertEqual(self.organization_membership.level, OrganizationMembership.Level.ADMIN) - self.assertEqual(role.feature_flags_access_level, OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT) + self.assertEqual( + role.feature_flags_access_level, + OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT, + ) self.client.patch( f"/api/organizations/@current/roles/{role.id}", {"feature_flags_access_level": OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW}, ) self.assertEqual( - Role.objects.first().feature_flags_access_level, OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW # type: ignore + Role.objects.first().feature_flags_access_level, + OrganizationResourceAccess.AccessLevel.CAN_ONLY_VIEW, # type: ignore ) self.client.patch( f"/api/organizations/@current/roles/{role.id}", diff --git a/ee/api/test/test_subscription.py b/ee/api/test/test_subscription.py index 16bfb76b08fbd..cabc81d66eed0 100644 --- a/ee/api/test/test_subscription.py +++ b/ee/api/test/test_subscription.py @@ -16,7 +16,10 @@ class TestSubscription(APILicensedTest): dashboard: Dashboard = None # type: ignore insight: Insight = None # type: ignore - insight_filter_dict = {"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + insight_filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } @classmethod def setUpTestData(cls): @@ -24,7 +27,9 @@ def setUpTestData(cls): cls.dashboard = Dashboard.objects.create(team=cls.team, name="example dashboard", created_by=cls.user) cls.insight = Insight.objects.create( - 
filters=Filter(data=cls.insight_filter_dict).to_dict(), team=cls.team, created_by=cls.user + filters=Filter(data=cls.insight_filter_dict).to_dict(), + team=cls.team, + created_by=cls.user, ) def _create_subscription(self, **kwargs): @@ -96,7 +101,10 @@ def test_can_update_existing_subscription(self, mock_subscription_tasks): mock_subscription_tasks.handle_subscription_value_change.delay.reset_mock() response = self.client.patch( f"/api/projects/{self.team.id}/subscriptions/{data['id']}", - {"target_value": "test@posthog.com,new_user@posthog.com", "invite_message": "hi new user"}, + { + "target_value": "test@posthog.com,new_user@posthog.com", + "invite_message": "hi new user", + }, ) updated_data = response.json() assert updated_data["target_value"] == "test@posthog.com,new_user@posthog.com" diff --git a/ee/api/test/test_tagged_item.py b/ee/api/test/test_tagged_item.py index 18c9d28af38b7..cc0e29f6495d8 100644 --- a/ee/api/test/test_tagged_item.py +++ b/ee/api/test/test_tagged_item.py @@ -18,7 +18,9 @@ def test_get_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team_id=self.team.id, name="private dashboard") @@ -35,7 +37,9 @@ def test_resolve_overlapping_tags_on_update(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team_id=self.team.id, name="private dashboard") @@ -60,17 +64,25 @@ def test_create_and_update_object_with_tags(self): from ee.models.license import 
License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) - response = self.client.post(f"/api/projects/{self.team.id}/dashboards/", {"name": "Default", "pinned": "true"}) + response = self.client.post( + f"/api/projects/{self.team.id}/dashboards/", + {"name": "Default", "pinned": "true"}, + ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.json()["tags"], []) self.assertEqual(TaggedItem.objects.all().count(), 0) id = response.json()["id"] - response = self.client.patch(f"/api/projects/{self.team.id}/dashboards/{id}", {"tags": ["b", "c", "d", "e"]}) + response = self.client.patch( + f"/api/projects/{self.team.id}/dashboards/{id}", + {"tags": ["b", "c", "d", "e"]}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(sorted(response.json()["tags"]), ["b", "c", "d", "e"]) self.assertEqual(TaggedItem.objects.all().count(), 4) @@ -79,11 +91,14 @@ def test_create_with_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) response = self.client.post( - f"/api/projects/{self.team.id}/dashboards/", {"name": "Default", "pinned": "true", "tags": ["nightly"]} + f"/api/projects/{self.team.id}/dashboards/", + {"name": "Default", "pinned": "true", "tags": ["nightly"]}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -94,12 +109,15 @@ def test_no_duplicate_tags(self): from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", 
plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team=self.team, name="Edit-restricted dashboard", created_by=self.user) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", {"tags": ["a", "b", "a"]} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}", + {"tags": ["a", "b", "a"]}, ) self.assertListEqual(sorted(response.json()["tags"]), ["a", "b"]) @@ -108,7 +126,9 @@ def test_can_list_tags(self) -> None: from ee.models.license import License, LicenseManager super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) dashboard = Dashboard.objects.create(team_id=self.team.id, name="private dashboard") diff --git a/ee/api/test/test_team.py b/ee/api/test/test_team.py index 7c5f9c7836ee5..7bc7827d1f4b8 100644 --- a/ee/api/test/test_team.py +++ b/ee/api/test/test_team.py @@ -44,7 +44,8 @@ def test_non_admin_cannot_create_project(self): self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) self.assertEqual(Team.objects.count(), count) self.assertEqual( - response.json(), self.permission_denied_response("Your organization access level is insufficient.") + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), ) def test_create_demo_project(self, *args): @@ -128,7 +129,9 @@ def test_delete_open_team_as_org_member_but_project_admin_forbidden(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + 
parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.delete(f"/api/projects/{self.team.id}") self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) @@ -140,7 +143,9 @@ def test_delete_private_team_as_org_member_but_project_admin_allowed(self): self.team.access_control = True self.team.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.delete(f"/api/projects/{self.team.id}") self.assertEqual(response.status_code, HTTP_204_NO_CONTENT) @@ -219,7 +224,9 @@ def test_rename_private_project_as_org_member_and_project_member_allowed(self): self.team.access_control = True self.team.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) response = self.client.patch(f"/api/projects/@current/", {"name": "Acherontia atropos"}) @@ -252,7 +259,9 @@ def test_enable_access_control_as_org_member_and_project_admin_forbidden(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.patch(f"/api/projects/@current/", {"access_control": True}) @@ -281,7 +290,9 @@ def test_disable_access_control_as_org_member_and_project_admin_forbidden(self): self.team.access_control = True self.team.save() ExplicitTeamMembership.objects.create( - 
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.patch(f"/api/projects/@current/", {"access_control": False}) @@ -304,7 +315,8 @@ def test_disable_access_control_as_org_admin_allowed(self): def test_can_update_and_retrieve_person_property_names_excluded_from_correlation(self): response = self.client.patch( - f"/api/projects/@current/", {"correlation_config": {"excluded_person_property_names": ["$os"]}} + f"/api/projects/@current/", + {"correlation_config": {"excluded_person_property_names": ["$os"]}}, ) self.assertEqual(response.status_code, HTTP_200_OK) @@ -314,7 +326,8 @@ def test_can_update_and_retrieve_person_property_names_excluded_from_correlation response_data = response.json() self.assertDictContainsSubset( - {"correlation_config": {"excluded_person_property_names": ["$os"]}}, response_data + {"correlation_config": {"excluded_person_property_names": ["$os"]}}, + response_data, ) # Fetching projects @@ -364,7 +377,8 @@ def test_fetch_private_team_as_org_member(self): self.assertEqual(response.status_code, HTTP_403_FORBIDDEN) self.assertEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) def test_fetch_private_team_as_org_member_and_project_member(self): @@ -373,7 +387,9 @@ def test_fetch_private_team_as_org_member_and_project_member(self): self.team.access_control = True self.team.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) response = self.client.get(f"/api/projects/@current/") 
@@ -395,7 +411,9 @@ def test_fetch_private_team_as_org_member_and_project_admin(self): self.team.access_control = True self.team.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.get(f"/api/projects/@current/") diff --git a/ee/api/test/test_team_memberships.py b/ee/api/test/test_team_memberships.py index 1239b0d08e7a0..3aa19e266074d 100644 --- a/ee/api/test/test_team_memberships.py +++ b/ee/api/test/test_team_memberships.py @@ -25,7 +25,10 @@ def test_add_member_as_org_owner_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER}, + { + "effective_level": ExplicitTeamMembership.Level.MEMBER, + "level": ExplicitTeamMembership.Level.MEMBER, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -44,7 +47,10 @@ def test_add_member_as_org_admin_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER}, + { + "effective_level": ExplicitTeamMembership.Level.MEMBER, + "level": ExplicitTeamMembership.Level.MEMBER, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -63,7 +69,8 @@ def test_add_member_as_org_member_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -79,7 +86,8 @@ def 
test_add_yourself_as_org_member_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -95,7 +103,8 @@ def test_add_yourself_as_org_admin_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You can't explicitly add yourself to projects."), response_data + self.permission_denied_response("You can't explicitly add yourself to projects."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -105,7 +114,9 @@ def test_add_member_as_org_member_and_project_member_forbidden(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -116,7 +127,8 @@ def test_add_member_as_org_member_and_project_member_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -126,7 +138,9 @@ def test_add_member_as_org_member_but_project_admin_allowed(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - 
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) self.assertEqual(self.team.explicit_memberships.count(), 1) @@ -137,7 +151,10 @@ def test_add_member_as_org_member_but_project_admin_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER}, + { + "effective_level": ExplicitTeamMembership.Level.MEMBER, + "level": ExplicitTeamMembership.Level.MEMBER, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -148,7 +165,9 @@ def test_add_member_as_org_admin_and_project_member_allowed(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -157,7 +176,10 @@ def test_add_member_as_org_admin_and_project_member_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER}, + { + "effective_level": ExplicitTeamMembership.Level.MEMBER, + "level": ExplicitTeamMembership.Level.MEMBER, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -175,7 +197,10 @@ def test_add_admin_as_org_admin_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN}, + { + "effective_level": 
ExplicitTeamMembership.Level.ADMIN, + "level": ExplicitTeamMembership.Level.ADMIN, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -184,7 +209,9 @@ def test_add_admin_as_project_member_forbidden(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -196,7 +223,8 @@ def test_add_admin_as_project_member_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -204,7 +232,9 @@ def test_add_admin_as_project_admin_allowed(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -216,7 +246,10 @@ def test_add_admin_as_project_admin_allowed(self): response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN}, + { + "effective_level": ExplicitTeamMembership.Level.ADMIN, + "level": ExplicitTeamMembership.Level.ADMIN, + 
}, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -228,11 +261,17 @@ def test_add_member_to_non_current_project_allowed(self): new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) - response = self.client.post(f"/api/projects/{another_team.id}/explicit_members/", {"user_uuid": new_user.uuid}) + response = self.client.post( + f"/api/projects/{another_team.id}/explicit_members/", + {"user_uuid": new_user.uuid}, + ) response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER}, + { + "effective_level": ExplicitTeamMembership.Level.MEMBER, + "level": ExplicitTeamMembership.Level.MEMBER, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -244,11 +283,15 @@ def test_add_member_to_project_in_outside_organization_forbidden(self): "Acme", "mallory@acme.com", None, team_fields={"access_control": True} ) - response = self.client.post(f"/api/projects/{new_team.id}/explicit_members/", {"user_uuid": new_user.uuid}) + response = self.client.post( + f"/api/projects/{new_team.id}/explicit_members/", + {"user_uuid": new_user.uuid}, + ) response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -261,7 +304,8 @@ def test_add_member_to_project_that_is_not_organization_member_forbidden(self): response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You both need to belong to the same organization."), response_data + self.permission_denied_response("You both need to belong to the same organization."), + response_data, ) self.assertEqual(response.status_code, 
status.HTTP_403_FORBIDDEN) @@ -287,12 +331,16 @@ def test_set_level_of_member_to_admin_as_org_owner_allowed(self): ExplicitTeamMembership.objects.create(team=self.team, parent_membership=new_org_membership) response = self.client.patch( - f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN} + f"/api/projects/@current/explicit_members/{new_user.uuid}", + {"level": ExplicitTeamMembership.Level.ADMIN}, ) response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN}, + { + "effective_level": ExplicitTeamMembership.Level.ADMIN, + "level": ExplicitTeamMembership.Level.ADMIN, + }, response_data, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -308,12 +356,14 @@ def test_set_level_of_member_to_admin_as_org_member_forbidden(self): ExplicitTeamMembership.objects.create(team=self.team, parent_membership=new_org_membership) response = self.client.patch( - f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN} + f"/api/projects/@current/explicit_members/{new_user.uuid}", + {"level": ExplicitTeamMembership.Level.ADMIN}, ) response_data = response.json() self.assertDictEqual( - self.permission_denied_response("You don't have sufficient permissions in the project."), response_data + self.permission_denied_response("You don't have sufficient permissions in the project."), + response_data, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -321,22 +371,30 @@ def test_demote_yourself_as_org_member_and_project_admin_forbidden(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + 
parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.patch( - f"/api/projects/@current/explicit_members/{self.user.uuid}", {"level": ExplicitTeamMembership.Level.MEMBER} + f"/api/projects/@current/explicit_members/{self.user.uuid}", + {"level": ExplicitTeamMembership.Level.MEMBER}, ) response_data = response.json() - self.assertDictEqual(self.permission_denied_response("You can't set your own access level."), response_data) + self.assertDictEqual( + self.permission_denied_response("You can't set your own access level."), + response_data, + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_set_level_of_member_to_admin_as_org_member_but_project_admin_allowed(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -346,12 +404,16 @@ def test_set_level_of_member_to_admin_as_org_member_but_project_admin_allowed(se ExplicitTeamMembership.objects.create(team=self.team, parent_membership=new_org_membership) response = self.client.patch( - f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN} + f"/api/projects/@current/explicit_members/{new_user.uuid}", + {"level": ExplicitTeamMembership.Level.ADMIN}, ) response_data = response.json() self.assertDictContainsSubset( - {"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN}, + { + "effective_level": ExplicitTeamMembership.Level.ADMIN, + "level": ExplicitTeamMembership.Level.ADMIN, + }, response_data, ) 
self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -388,7 +450,9 @@ def test_remove_member_as_org_member_but_project_admin_allowed(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) new_user: User = User.objects.create_and_join(self.organization, "rookie@posthog.com", None) @@ -425,7 +489,9 @@ def test_leave_project_as_admin_allowed(self): self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}") @@ -436,7 +502,9 @@ def test_leave_project_as_admin_member(self): self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.MEMBER, ) response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}") @@ -456,7 +524,9 @@ def test_set_current_project_no_access(self): self.organization_membership.level = OrganizationMembership.Level.MEMBER self.organization_membership.save() ExplicitTeamMembership.objects.create( - team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN + team=self.team, + parent_membership=self.organization_membership, + level=ExplicitTeamMembership.Level.ADMIN, ) 
Team.objects.create(organization=self.organization) diff --git a/ee/api/test/test_time_to_see_data.py b/ee/api/test/test_time_to_see_data.py index bbf826723d7eb..4c5a50d51e58f 100644 --- a/ee/api/test/test_time_to_see_data.py +++ b/ee/api/test/test_time_to_see_data.py @@ -39,15 +39,27 @@ def test_sessions_api(self): insert( "metrics_time_to_see_data", [ - MetricsRow(session_id="456", timestamp="2022-10-05 12:20:30", time_to_see_data_ms=7000), - MetricsRow(session_id="123", timestamp="2022-10-05 10:10:30", time_to_see_data_ms=2000), + MetricsRow( + session_id="456", + timestamp="2022-10-05 12:20:30", + time_to_see_data_ms=7000, + ), + MetricsRow( + session_id="123", + timestamp="2022-10-05 10:10:30", + time_to_see_data_ms=2000, + ), MetricsRow( session_id="123", timestamp="2022-10-05 10:30:25", time_to_see_data_ms=1000, is_primary_interaction=False, ), - MetricsRow(session_id="123", timestamp="2022-10-05 10:30:30", time_to_see_data_ms=7000), + MetricsRow( + session_id="123", + timestamp="2022-10-05 10:30:30", + time_to_see_data_ms=7000, + ), ], ) @@ -90,7 +102,11 @@ def test_session_events_api(self): insert( "metrics_time_to_see_data", [ - MetricsRow(session_id="456", timestamp="2022-10-05 12:20:30", time_to_see_data_ms=7000), + MetricsRow( + session_id="456", + timestamp="2022-10-05 12:20:30", + time_to_see_data_ms=7000, + ), MetricsRow( session_id="123", timestamp="2022-10-05 10:10:30", @@ -105,7 +121,11 @@ def test_session_events_api(self): primary_interaction_id="111-222-333", query_id="777", ), - MetricsRow(session_id="123", timestamp="2022-10-05 10:30:30", time_to_see_data_ms=7000), + MetricsRow( + session_id="123", + timestamp="2022-10-05 10:30:30", + time_to_see_data_ms=7000, + ), ], ) diff --git a/ee/api/time_to_see_data.py b/ee/api/time_to_see_data.py index 7710c8ade2f98..7c3ed10ed80fa 100644 --- a/ee/api/time_to_see_data.py +++ b/ee/api/time_to_see_data.py @@ -3,7 +3,10 @@ from rest_framework.response import Response from posthog.permissions import 
IsStaffUser -from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer +from posthog.queries.time_to_see_data.serializers import ( + SessionEventsQuerySerializer, + SessionsQuerySerializer, +) from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions diff --git a/ee/benchmarks/benchmarks.py b/ee/benchmarks/benchmarks.py index 4f61f82df0482..f4dc81eb2dfa5 100644 --- a/ee/benchmarks/benchmarks.py +++ b/ee/benchmarks/benchmarks.py @@ -11,9 +11,14 @@ from ee.clickhouse.queries.stickiness import ClickhouseStickiness from ee.clickhouse.queries.funnels.funnel_correlation import FunnelCorrelation from posthog.queries.funnels import ClickhouseFunnel -from posthog.queries.property_values import get_property_values_for_key, get_person_property_values_for_key +from posthog.queries.property_values import ( + get_property_values_for_key, + get_person_property_values_for_key, +) from posthog.queries.trends.trends import Trends -from posthog.queries.session_recordings.session_recording_list import SessionRecordingList +from posthog.queries.session_recordings.session_recording_list import ( + SessionRecordingList, +) from ee.clickhouse.queries.retention import ClickhouseRetention from posthog.queries.util import get_earliest_timestamp from posthog.models import Action, ActionStep, Cohort, Team, Organization @@ -33,7 +38,11 @@ ] DATE_RANGE = {"date_from": "2021-01-01", "date_to": "2021-10-01", "interval": "week"} -SHORT_DATE_RANGE = {"date_from": "2021-07-01", "date_to": "2021-10-01", "interval": "week"} +SHORT_DATE_RANGE = { + "date_from": "2021-07-01", + "date_to": "2021-10-01", + "interval": "week", +} SESSIONS_DATE_RANGE = {"date_from": "2021-11-17", "date_to": "2021-11-22"} @@ -102,7 +111,14 @@ def track_trends_person_property_filter(self): filter = Filter( data={ "events": [{"id": "$pageview"}], - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + 
"properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, } ) @@ -115,7 +131,14 @@ def track_trends_person_property_filter_materialized(self): filter = Filter( data={ "events": [{"id": "$pageview"}], - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, } ) @@ -138,7 +161,12 @@ def track_trends_event_property_breakdown_materialized(self): @benchmark_clickhouse def track_trends_person_property_breakdown(self): filter = Filter( - data={"events": [{"id": "$pageview"}], "breakdown": "$browser", "breakdown_type": "person", **DATE_RANGE} + data={ + "events": [{"id": "$pageview"}], + "breakdown": "$browser", + "breakdown_type": "person", + **DATE_RANGE, + } ) with no_materialized_columns(): @@ -147,7 +175,12 @@ def track_trends_person_property_breakdown(self): @benchmark_clickhouse def track_trends_person_property_breakdown_materialized(self): filter = Filter( - data={"events": [{"id": "$pageview"}], "breakdown": "$browser", "breakdown_type": "person", **DATE_RANGE} + data={ + "events": [{"id": "$pageview"}], + "breakdown": "$browser", + "breakdown_type": "person", + **DATE_RANGE, + } ) Trends().run(filter, self.team) @@ -162,7 +195,14 @@ def track_trends_dau_person_property_filter(self): filter = Filter( data={ "events": [{"id": "$pageview", "math": "dau"}], - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, } ) @@ -175,7 +215,14 @@ def track_trends_dau_person_property_filter_materialized(self): filter = Filter( data={ "events": [{"id": "$pageview", "math": "dau"}], - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + 
"properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, } ) @@ -253,7 +300,14 @@ def track_trends_filter_by_action_with_person_filters_materialized(self): ActionStep.objects.create( action=action, event="$pageview", - properties=[{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + properties=[ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], ) filter = Filter(data={"actions": [{"id": action.id}], **DATE_RANGE}, team=self.team) @@ -265,7 +319,14 @@ def track_trends_filter_by_action_with_person_filters(self): ActionStep.objects.create( action=action, event="$pageview", - properties=[{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + properties=[ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], ) filter = Filter(data={"actions": [{"id": action.id}], **DATE_RANGE}, team=self.team) @@ -277,7 +338,10 @@ def track_funnel_normal(self): filter = Filter( data={ "insight": "FUNNELS", - "events": [{"id": "user signed up", "order": 0}, {"id": "insight analyzed", "order": 1}], + "events": [ + {"id": "user signed up", "order": 0}, + {"id": "insight analyzed", "order": 1}, + ], **DATE_RANGE, }, team=self.team, @@ -287,7 +351,11 @@ def track_funnel_normal(self): @benchmark_clickhouse def track_correlations_by_events(self): filter = Filter( - data={"events": [{"id": "user signed up"}, {"id": "insight analyzed"}], **SHORT_DATE_RANGE}, team=self.team + data={ + "events": [{"id": "user signed up"}, {"id": "insight analyzed"}], + **SHORT_DATE_RANGE, + }, + team=self.team, ) FunnelCorrelation(filter, self.team).run() @@ -369,7 +437,14 @@ def track_stickiness_filter_by_person_property(self): "events": [{"id": "$pageview"}], "shown_as": "Stickiness", "display": "ActionsLineGraph", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": 
"person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -386,7 +461,14 @@ def track_stickiness_filter_by_person_property_materialized(self): "events": [{"id": "$pageview"}], "shown_as": "Stickiness", "display": "ActionsLineGraph", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -402,7 +484,10 @@ def track_session_recordings_list(self): @benchmark_clickhouse def track_session_recordings_list_event_filter(self): - filter = SessionRecordingsFilter(data={"events": [{"id": "$pageview"}], **SESSIONS_DATE_RANGE}, team=self.team) + filter = SessionRecordingsFilter( + data={"events": [{"id": "$pageview"}], **SESSIONS_DATE_RANGE}, + team=self.team, + ) SessionRecordingList(filter, self.team).run() @@ -413,7 +498,14 @@ def track_session_recordings_list_person_property_filter(self): "events": [ { "id": "$pageview", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], } ], **SESSIONS_DATE_RANGE, @@ -473,7 +565,14 @@ def track_retention_filter_by_person_property(self): "total_intervals": 14, "retention_type": "retention_first_time", "period": "Week", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -492,7 +591,14 @@ def track_retention_filter_by_person_property_materialized(self): "total_intervals": 14, "retention_type": "retention_first_time", "period": "Week", - "properties": [{"key": "email", "operator": "icontains", "value": 
".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -584,7 +690,14 @@ def track_lifecycle_person_property_filter(self): "interval": "week", "shown_as": "Lifecycle", "date_from": "-14d", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -602,7 +715,14 @@ def track_lifecycle_person_property_filter_materialized(self): "interval": "week", "shown_as": "Lifecycle", "date_from": "-14d", - "properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], **DATE_RANGE, }, team=self.team, @@ -636,7 +756,11 @@ def setup(self): for table, property in MATERIALIZED_PROPERTIES: if (property, "properties") not in get_materialized_columns(table): materialize(table, property) - backfill_materialized_columns(table, [(property, "properties")], backfill_period=timedelta(days=1_000)) + backfill_materialized_columns( + table, + [(property, "properties")], + backfill_period=timedelta(days=1_000), + ) # :TRICKY: Data in benchmark servers has ID=2 team = Team.objects.filter(id=2).first() @@ -650,7 +774,18 @@ def setup(self): cohort = Cohort.objects.create( team_id=2, name="benchmarking cohort", - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) cohort.calculate_people_ch(pending_version=0) self.cohort = cohort diff --git a/ee/benchmarks/helpers.py b/ee/benchmarks/helpers.py index f6d4f2b8e6995..1a22550aceef0 100644 --- 
a/ee/benchmarks/helpers.py +++ b/ee/benchmarks/helpers.py @@ -71,6 +71,9 @@ def inner(*args): @contextmanager def no_materialized_columns(): "Allows running a function without any materialized columns being used in query" - get_materialized_columns._cache = {("events",): (now(), {}), ("person",): (now(), {})} + get_materialized_columns._cache = { + ("events",): (now(), {}), + ("person",): (now(), {}), + } yield get_materialized_columns._cache = {} diff --git a/ee/billing/billing_manager.py b/ee/billing/billing_manager.py index b6d079c74c32f..0790952c49ab1 100644 --- a/ee/billing/billing_manager.py +++ b/ee/billing/billing_manager.py @@ -162,7 +162,10 @@ def _get_billing(self, organization: Organization) -> BillingStatus: if not self.license: # mypy raise Exception("No license found") - res = requests.get(f"{BILLING_SERVICE_URL}/api/billing", headers=self.get_auth_headers(organization)) + res = requests.get( + f"{BILLING_SERVICE_URL}/api/billing", + headers=self.get_auth_headers(organization), + ) handle_billing_service_error(res) @@ -177,7 +180,10 @@ def _get_stripe_portal_url(self, organization: Organization) -> BillingStatus: if not self.license: # mypy raise Exception("No license found") - res = requests.get(f"{BILLING_SERVICE_URL}/api/billing/portal", headers=self.get_auth_headers(organization)) + res = requests.get( + f"{BILLING_SERVICE_URL}/api/billing/portal", + headers=self.get_auth_headers(organization), + ) handle_billing_service_error(res) diff --git a/ee/billing/quota_limiting.py b/ee/billing/quota_limiting.py index 0f0ebb728fcb7..ae6eefcc0b77a 100644 --- a/ee/billing/quota_limiting.py +++ b/ee/billing/quota_limiting.py @@ -204,7 +204,10 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, # Get the current quota limits so we can track to poshog if it changes orgs_with_changes = set() - previously_quota_limited_team_tokens: Dict[str, Dict[str, int]] = {"events": {}, "recordings": {}} + previously_quota_limited_team_tokens: 
Dict[str, Dict[str, int]] = { + "events": {}, + "recordings": {}, + } for field in quota_limited_orgs: previously_quota_limited_team_tokens[field] = list_limited_team_tokens(QuotaResource(field)) @@ -233,7 +236,10 @@ def update_all_org_billing_quotas(dry_run: bool = False) -> Dict[str, Dict[str, } report_organization_action( - orgs_by_id[org_id], "organization quota limits changed", properties=properties, group_properties=properties + orgs_by_id[org_id], + "organization quota limits changed", + properties=properties, + group_properties=properties, ) if not dry_run: diff --git a/ee/billing/test/test_billing_manager.py b/ee/billing/test/test_billing_manager.py index b5e8d9a6f6438..e0c09e0d071fb 100644 --- a/ee/billing/test/test_billing_manager.py +++ b/ee/billing/test/test_billing_manager.py @@ -18,7 +18,9 @@ class TestBillingManager(BaseTest): def test_update_billing_distinct_ids(self, billing_patch_request_mock: MagicMock): organization = self.organization license = super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key123::key123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key123::key123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) User.objects.create_and_join( organization=organization, diff --git a/ee/clickhouse/materialized_columns/analyze.py b/ee/clickhouse/materialized_columns/analyze.py index 200448bee18a1..4ab0f58286610 100644 --- a/ee/clickhouse/materialized_columns/analyze.py +++ b/ee/clickhouse/materialized_columns/analyze.py @@ -20,7 +20,10 @@ from posthog.cache_utils import instance_memoize from posthog.client import sync_execute from posthog.models.filters.mixins.utils import cached_property -from posthog.models.person.sql import GET_EVENT_PROPERTIES_COUNT, GET_PERSON_PROPERTIES_COUNT +from posthog.models.person.sql import ( + GET_EVENT_PROPERTIES_COUNT, + GET_PERSON_PROPERTIES_COUNT, +) from posthog.models.property import PropertyName, TableColumn, 
TableWithProperties from posthog.models.property_definition import PropertyDefinition from posthog.models.team import Team @@ -50,7 +53,8 @@ def person_on_events_properties(self, team_id: str) -> Set[str]: @instance_memoize def group_on_events_properties(self, group_type_index: int, team_id: str) -> Set[str]: return self._get_properties( - GET_EVENT_PROPERTIES_COUNT.format(column_name=f"group{group_type_index}_properties"), team_id + GET_EVENT_PROPERTIES_COUNT.format(column_name=f"group{group_type_index}_properties"), + team_id, ) def _get_properties(self, query, team_id) -> Set[str]: @@ -59,7 +63,12 @@ def _get_properties(self, query, team_id) -> Set[str]: class Query: - def __init__(self, query_string: str, query_time_ms: float, min_query_time=MATERIALIZE_COLUMNS_MINIMUM_QUERY_TIME): + def __init__( + self, + query_string: str, + query_time_ms: float, + min_query_time=MATERIALIZE_COLUMNS_MINIMUM_QUERY_TIME, + ): self.query_string = query_string self.query_time_ms = query_time_ms self.min_query_time = min_query_time @@ -187,7 +196,10 @@ def materialize_properties_task( else: logger.info("Found no columns to materialize.") - properties: Dict[TableWithProperties, List[Tuple[PropertyName, TableColumn]]] = {"events": [], "person": []} + properties: Dict[TableWithProperties, List[Tuple[PropertyName, TableColumn]]] = { + "events": [], + "person": [], + } for table, table_column, property_name, cost in result[:maximum]: logger.info(f"Materializing column. 
table={table}, property_name={property_name}, cost={cost}") diff --git a/ee/clickhouse/materialized_columns/columns.py b/ee/clickhouse/materialized_columns/columns.py index 8a076ae6af532..71bfd5adcc751 100644 --- a/ee/clickhouse/materialized_columns/columns.py +++ b/ee/clickhouse/materialized_columns/columns.py @@ -194,7 +194,9 @@ def backfill_materialized_columns( def _materialized_column_name( - table: TableWithProperties, property: PropertyName, table_column: TableColumn = DEFAULT_TABLE_COLUMN + table: TableWithProperties, + property: PropertyName, + table_column: TableColumn = DEFAULT_TABLE_COLUMN, ) -> str: "Returns a sanitized and unique column name to use for materialized column" diff --git a/ee/clickhouse/materialized_columns/test/test_analyze.py b/ee/clickhouse/materialized_columns/test/test_analyze.py index 9e4a62627f7c1..8ebd98b19e1b7 100644 --- a/ee/clickhouse/materialized_columns/test/test_analyze.py +++ b/ee/clickhouse/materialized_columns/test/test_analyze.py @@ -18,7 +18,10 @@ def setUp(self): """, 6723, ), - (f"SELECT JSONExtractString(properties, 'person_prop') FROM person WHERE team_id = {self.team.pk}", 9723), + ( + f"SELECT JSONExtractString(properties, 'person_prop') FROM person WHERE team_id = {self.team.pk}", + 9723, + ), ( f""" SELECT JSONExtractString(person_properties, 'person_prop') @@ -56,7 +59,10 @@ def setUp(self): "distinct_id": f"user_id", "team": self.team, "timestamp": "2021-05-01 00:00:00", - "person_properties": {"person_prop": "something", "another_person_prop": "something"}, + "person_properties": { + "person_prop": "something", + "another_person_prop": "something", + }, "group0_properties": {"group_prop": "something"}, "group2_properties": {"group_prop": "something2"}, } @@ -82,10 +88,16 @@ def test_query_class(self): self.assertEqual( list(event_query.properties(TeamManager())), - [("events", "properties", "event_prop"), ("events", "properties", "another_prop")], + [ + ("events", "properties", "event_prop"), + ("events", 
"properties", "another_prop"), + ], ) - self.assertEqual(list(person_query.properties(TeamManager())), [("person", "properties", "person_prop")]) + self.assertEqual( + list(person_query.properties(TeamManager())), + [("person", "properties", "person_prop")], + ) self.assertEqual( list(person_on_events_query.properties(TeamManager())), [ @@ -96,7 +108,10 @@ def test_query_class(self): ) self.assertEqual( list(group_on_events_query.properties(TeamManager())), - [("events", "group0_properties", "group_prop"), ("events", "group2_properties", "group_prop")], + [ + ("events", "group0_properties", "group_prop"), + ("events", "group2_properties", "group_prop"), + ], ) self.assertEqual(event_query.cost, 4) @@ -110,7 +125,8 @@ def test_query_class_edge_cases(self): self.assertIsNone(invalid_query.team_id) query_with_unknown_property = Query( - f"SELECT JSONExtractString(properties, '$unknown_prop') FROM events WHERE team_id = {self.team.pk}", 3400 + f"SELECT JSONExtractString(properties, '$unknown_prop') FROM events WHERE team_id = {self.team.pk}", + 3400, ) self.assertEqual(list(query_with_unknown_property.properties(TeamManager())), []) @@ -118,6 +134,7 @@ def test_query_class_edge_cases(self): # match group missing, should probably never happen, since the query is now wrong. 
query_with_invalid_column = Query( - f"SELECT JSONExtractString(, 'prop') FROM events WHERE team_id = {self.team.pk}", 3340 + f"SELECT JSONExtractString(, 'prop') FROM events WHERE team_id = {self.team.pk}", + 3340, ) self.assertEqual(list(query_with_invalid_column.properties(TeamManager())), []) diff --git a/ee/clickhouse/materialized_columns/test/test_columns.py b/ee/clickhouse/materialized_columns/test/test_columns.py index 0a44ea078b92d..8219e03354ee9 100644 --- a/ee/clickhouse/materialized_columns/test/test_columns.py +++ b/ee/clickhouse/materialized_columns/test/test_columns.py @@ -66,7 +66,10 @@ def test_caching_and_materializing(self): [property_name for property_name, _ in get_materialized_columns("events", use_cache=True).keys()], ["$foo", "$bar", *EVENTS_TABLE_DEFAULT_MATERIALIZED_COLUMNS], ) - self.assertCountEqual(get_materialized_columns("person", use_cache=True).keys(), [("$zeta", "properties")]) + self.assertCountEqual( + get_materialized_columns("person", use_cache=True).keys(), + [("$zeta", "properties")], + ) materialize("events", "abc", create_minmax_index=True) @@ -98,14 +101,21 @@ def test_materialized_column_naming(self): get_materialized_columns("events"), ) - self.assertEqual(get_materialized_columns("person"), {("SoMePrOp", "properties"): "pmat_SoMePrOp"}) + self.assertEqual( + get_materialized_columns("person"), + {("SoMePrOp", "properties"): "pmat_SoMePrOp"}, + ) def test_backfilling_data(self): sync_execute("ALTER TABLE events DROP COLUMN IF EXISTS mat_prop") sync_execute("ALTER TABLE events DROP COLUMN IF EXISTS mat_another") _create_event( - event="some_event", distinct_id="1", team=self.team, timestamp="2020-01-01 00:00:00", properties={"prop": 1} + event="some_event", + distinct_id="1", + team=self.team, + timestamp="2020-01-01 00:00:00", + properties={"prop": 1}, ) _create_event( event="some_event", @@ -115,9 +125,18 @@ def test_backfilling_data(self): properties={"prop": 2, "another": 5}, ) _create_event( - event="some_event", 
distinct_id="1", team=self.team, timestamp="2021-05-03 00:00:00", properties={"prop": 3} + event="some_event", + distinct_id="1", + team=self.team, + timestamp="2021-05-03 00:00:00", + properties={"prop": 3}, + ) + _create_event( + event="another_event", + distinct_id="1", + team=self.team, + timestamp="2021-05-04 00:00:00", ) - _create_event(event="another_event", distinct_id="1", team=self.team, timestamp="2021-05-04 00:00:00") _create_event( event="third_event", distinct_id="1", @@ -165,7 +184,15 @@ def test_backfilling_data(self): self.assertEqual( sync_execute("SELECT mat_prop, mat_another FROM events ORDER BY timestamp"), - [("1", ""), ("2", "5"), ("3", ""), ("", ""), ("4", ""), ("", "6"), ("", "7")], + [ + ("1", ""), + ("2", "5"), + ("3", ""), + ("", ""), + ("4", ""), + ("", "6"), + ("", "7"), + ], ) def test_column_types(self): @@ -194,7 +221,11 @@ def _count_materialized_rows(self, column): AND table = %(table)s AND column = %(column)s """, - {"database": CLICKHOUSE_DATABASE, "table": EVENTS_DATA_TABLE(), "column": column}, + { + "database": CLICKHOUSE_DATABASE, + "table": EVENTS_DATA_TABLE(), + "column": column, + }, )[0][0] def _get_count_of_mutations_running(self) -> int: @@ -213,5 +244,9 @@ def _get_column_types(self, column: str): FROM system.columns WHERE database = %(database)s AND table = %(table)s AND name = %(column)s """, - {"database": CLICKHOUSE_DATABASE, "table": EVENTS_DATA_TABLE(), "column": column}, + { + "database": CLICKHOUSE_DATABASE, + "table": EVENTS_DATA_TABLE(), + "column": column, + }, )[0] diff --git a/ee/clickhouse/materialized_columns/test/test_query.py b/ee/clickhouse/materialized_columns/test/test_query.py index c78623a831c3e..3a55a0614f137 100644 --- a/ee/clickhouse/materialized_columns/test/test_query.py +++ b/ee/clickhouse/materialized_columns/test/test_query.py @@ -3,7 +3,6 @@ class TestQuery(ClickhouseTestMixin, APIBaseTest): def test_get_queries_detects(self): - # some random with self.capture_select_queries() as 
queries: self.client.post( diff --git a/ee/clickhouse/models/test/test_action.py b/ee/clickhouse/models/test/test_action.py index 745159f02b86d..692844e55c1e4 100644 --- a/ee/clickhouse/models/test/test_action.py +++ b/ee/clickhouse/models/test/test_action.py @@ -7,7 +7,12 @@ from posthog.models.action.util import filter_event, format_action_filter from posthog.models.action_step import ActionStep from posthog.models.test.test_event_model import filter_by_actions_factory -from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.test.base import ( + BaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) from hogvm.python.operation import Operation as op, HOGQL_BYTECODE_IDENTIFIER as _H @@ -31,7 +36,11 @@ def _get_events_for_action(action: Action) -> List[MockEvent]: AND events.team_id = %(team_id)s ORDER BY events.timestamp DESC """ - events = sync_execute(query, {"team_id": action.team_id, **params, **hogql_context.values}, team_id=action.team_id) + events = sync_execute( + query, + {"team_id": action.team_id, **params, **hogql_context.values}, + team_id=action.team_id, + ) return [MockEvent(str(uuid), distinct_id) for uuid, distinct_id in events] @@ -39,7 +48,8 @@ def _get_events_for_action(action: Action) -> List[MockEvent]: class TestActions( - ClickhouseTestMixin, filter_by_actions_factory(_create_event, _create_person, _get_events_for_action) # type: ignore + ClickhouseTestMixin, + filter_by_actions_factory(_create_event, _create_person, _get_events_for_action), # type: ignore ): pass @@ -69,7 +79,10 @@ def test_filter_event_exact_url(self): action1 = Action.objects.create(team=self.team, name="action1") step1 = ActionStep.objects.create( - event="$autocapture", action=action1, url="https://posthog.com/feedback/123", url_matching=ActionStep.EXACT + event="$autocapture", + action=action1, + url="https://posthog.com/feedback/123", + url_matching=ActionStep.EXACT, ) query, params = 
filter_event(step1) @@ -123,7 +136,6 @@ def test_filter_event_exact_url_with_query_params(self): ) def test_filter_event_contains_url(self): - _create_event( event="$autocapture", team=self.team, @@ -154,7 +166,6 @@ def test_filter_event_contains_url(self): self.assertEqual(len(result), 2) def test_filter_event_regex_url(self): - _create_event( event="$autocapture", team=self.team, @@ -178,7 +189,10 @@ def test_filter_event_regex_url(self): action1 = Action.objects.create(team=self.team, name="action1") step1 = ActionStep.objects.create( - event="$autocapture", action=action1, url="/123", url_matching=ActionStep.REGEX + event="$autocapture", + action=action1, + url="/123", + url_matching=ActionStep.REGEX, ) query, params = filter_event(step1) @@ -188,26 +202,55 @@ def test_filter_event_regex_url(self): def test_double(self): # Tests a regression where the second step properties would override those of the first step, causing issues - _create_event(event="insight viewed", team=self.team, distinct_id="whatever", properties={"filters_count": 2}) + _create_event( + event="insight viewed", + team=self.team, + distinct_id="whatever", + properties={"filters_count": 2}, + ) action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( event="insight viewed", action=action1, - properties=[{"key": "insight", "type": "event", "value": ["RETENTION"], "operator": "exact"}], + properties=[ + { + "key": "insight", + "type": "event", + "value": ["RETENTION"], + "operator": "exact", + } + ], ) ActionStep.objects.create( event="insight viewed", action=action1, - properties=[{"key": "filters_count", "type": "event", "value": "1", "operator": "gt"}], + properties=[ + { + "key": "filters_count", + "type": "event", + "value": "1", + "operator": "gt", + } + ], ) events = _get_events_for_action(action1) self.assertEqual(len(events), 1) def test_filter_with_hogql(self): - _create_event(event="insight viewed", team=self.team, distinct_id="first", 
properties={"filters_count": 20}) - _create_event(event="insight viewed", team=self.team, distinct_id="second", properties={"filters_count": 1}) + _create_event( + event="insight viewed", + team=self.team, + distinct_id="first", + properties={"filters_count": 20}, + ) + _create_event( + event="insight viewed", + team=self.team, + distinct_id="second", + properties={"filters_count": 1}, + ) action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( diff --git a/ee/clickhouse/models/test/test_cohort.py b/ee/clickhouse/models/test/test_cohort.py index 5d48d069bac0a..51ff0f2e8816f 100644 --- a/ee/clickhouse/models/test/test_cohort.py +++ b/ee/clickhouse/models/test/test_cohort.py @@ -39,12 +39,20 @@ def _create_action(**kwargs): class TestCohort(ClickhouseTestMixin, BaseTest): def _get_cohortpeople(self, cohort: Cohort): return sync_execute( - GET_COHORTPEOPLE_BY_COHORT_ID, {"team_id": self.team.pk, "cohort_id": cohort.pk, "version": cohort.version} + GET_COHORTPEOPLE_BY_COHORT_ID, + { + "team_id": self.team.pk, + "cohort_id": cohort.pk, + "version": cohort.version, + }, ) def test_prop_cohort_basic(self): - - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], @@ -52,9 +60,19 @@ def test_prop_cohort_basic(self): properties={"$some_prop": "something", "$another_prop": "something"}, ) _create_person(distinct_ids=["no_match"], team_id=self.team.pk) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + 
team=self.team, + distinct_id="some_other_id", + properties={"attr": "some_val"}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -62,7 +80,11 @@ def test_prop_cohort_basic(self): { "properties": [ {"key": "$some_prop", "value": "something", "type": "person"}, - {"key": "$another_prop", "value": "something", "type": "person"}, + { + "key": "$another_prop", + "value": "something", + "type": "person", + }, ] } ], @@ -71,15 +93,23 @@ def test_prop_cohort_basic(self): filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) def test_prop_cohort_basic_action(self): - - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], @@ -107,7 +137,10 @@ def test_prop_cohort_basic_action(self): cohort1 = Cohort.objects.create(team=self.team, groups=[{"action_id": action.pk, "days": 3}], name="cohort1") - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( team_id=self.team.pk, property_group=filter.property_groups, @@ -117,13 +150,19 
@@ def test_prop_cohort_basic_action(self): hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) def test_prop_cohort_basic_event_days(self): - - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], @@ -147,9 +186,16 @@ def test_prop_cohort_basic_event_days(self): timestamp=datetime.now() - timedelta(days=4, hours=12), ) - cohort1 = Cohort.objects.create(team=self.team, groups=[{"event_id": "$pageview", "days": 1}], name="cohort1") + cohort1 = Cohort.objects.create( + team=self.team, + groups=[{"event_id": "$pageview", "days": 1}], + name="cohort1", + ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( team_id=self.team.pk, property_group=filter.property_groups, @@ -159,12 +205,22 @@ def test_prop_cohort_basic_event_days(self): hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) - cohort2 = Cohort.objects.create(team=self.team, groups=[{"event_id": "$pageview", "days": 7}], name="cohort2") + 
cohort2 = Cohort.objects.create( + team=self.team, + groups=[{"event_id": "$pageview", "days": 7}], + name="cohort2", + ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( team_id=self.team.pk, property_group=filter.property_groups, @@ -174,12 +230,18 @@ def test_prop_cohort_basic_event_days(self): hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 2) def test_prop_cohort_basic_action_days(self): - - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], @@ -206,7 +268,10 @@ def test_prop_cohort_basic_action_days(self): cohort1 = Cohort.objects.create(team=self.team, groups=[{"action_id": action.pk, "days": 1}], name="cohort1") - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( team_id=self.team.pk, property_group=filter.property_groups, @@ -216,12 +281,18 @@ def test_prop_cohort_basic_action_days(self): hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, 
**filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) cohort2 = Cohort.objects.create(team=self.team, groups=[{"action_id": action.pk, "days": 7}], name="cohort2") - filter = Filter(data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( team_id=self.team.pk, property_group=filter.property_groups, @@ -231,17 +302,37 @@ def test_prop_cohort_basic_action_days(self): hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 2) def test_prop_cohort_multiple_groups(self): + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) - - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"$another_prop": "something"}) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"$another_prop": "something"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + 
distinct_id="some_other_id", + properties={"attr": "some_val"}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -252,46 +343,101 @@ def test_prop_cohort_multiple_groups(self): name="cohort1", ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 2) def test_prop_cohort_with_negation(self): team2 = Organization.objects.bootstrap(None)[2] - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) - _create_person(distinct_ids=["some_id"], team_id=team2.pk, properties={"$another_prop": "something"}) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) + _create_person( + distinct_ids=["some_id"], + team_id=team2.pk, + properties={"$another_prop": "something"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_other_id", + 
properties={"attr": "some_val"}, + ) cohort1 = Cohort.objects.create( team=self.team, groups=[ - {"properties": [{"type": "person", "key": "$some_prop", "operator": "is_not", "value": "something"}]} + { + "properties": [ + { + "type": "person", + "key": "$some_prop", + "operator": "is_not", + "value": "something", + } + ] + } ], name="cohort1", ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) self.assertIn("\nFROM person_distinct_id2\n", final_query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 0) def test_cohort_get_person_ids_by_cohort_id(self): - user1 = _create_person(distinct_ids=["user1"], team_id=self.team.pk, properties={"$some_prop": "something"}) - _create_person(distinct_ids=["user2"], team_id=self.team.pk, properties={"$some_prop": "another"}) - user3 = _create_person(distinct_ids=["user3"], team_id=self.team.pk, properties={"$some_prop": "something"}) + user1 = _create_person( + distinct_ids=["user1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) + _create_person( + distinct_ids=["user2"], + team_id=self.team.pk, + properties={"$some_prop": "another"}, + ) + user3 = _create_person( + distinct_ids=["user3"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) cohort = Cohort.objects.create( 
team=self.team, groups=[{"properties": [{"key": "$some_prop", "value": "something", "type": "person"}]}], @@ -322,7 +468,8 @@ def test_insert_by_distinct_id_or_email(self): Person.objects.create(team_id=self.team.pk, distinct_ids=["'); truncate person_static_cohort; --"]) cohort.insert_users_by_list(["'); truncate person_static_cohort; --", "123"]) results = sync_execute( - "select count(1) from person_static_cohort where team_id = %(team_id)s", {"team_id": self.team.pk} + "select count(1) from person_static_cohort where team_id = %(team_id)s", + {"team_id": self.team.pk}, )[0][0] self.assertEqual(results, 3) @@ -355,7 +502,11 @@ def test_cohortpeople_basic(self): { "properties": [ {"key": "$some_prop", "value": "something", "type": "person"}, - {"key": "$another_prop", "value": "something", "type": "person"}, + { + "key": "$another_prop", + "value": "something", + "type": "person", + }, ] } ], @@ -482,7 +633,6 @@ def _setup_actions_with_different_counts(self): return action def test_cohortpeople_action_count(self): - action = self._setup_actions_with_different_counts() # test operators @@ -534,7 +684,11 @@ def test_cohortpeople_deleted_person(self): { "properties": [ {"key": "$some_prop", "value": "something", "type": "person"}, - {"key": "$another_prop", "value": "something", "type": "person"}, + { + "key": "$another_prop", + "value": "something", + "type": "person", + }, ] } ], @@ -563,8 +717,16 @@ def test_cohortpeople_prop_changed(self): groups=[ { "properties": [ - {"key": "$some_prop", "value": "something", "type": "person"}, - {"key": "$another_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, + { + "key": "$another_prop", + "value": "something", + "type": "person", + }, ] } ], @@ -592,7 +754,9 @@ def test_cohort_change(self): properties={"$some_prop": "something", "$another_prop": "something"}, ) p2 = Person.objects.create( - team_id=self.team.pk, distinct_ids=["2"], 
properties={"$some_prop": "another", "$another_prop": "another"} + team_id=self.team.pk, + distinct_ids=["2"], + properties={"$some_prop": "another", "$another_prop": "another"}, ) cohort1 = Cohort.objects.create( @@ -601,7 +765,11 @@ def test_cohort_change(self): { "properties": [ {"key": "$some_prop", "value": "something", "type": "person"}, - {"key": "$another_prop", "value": "something", "type": "person"}, + { + "key": "$another_prop", + "value": "something", + "type": "person", + }, ] } ], @@ -651,7 +819,9 @@ def test_cohortpeople_with_valid_other_cohort_filter(self): Person.objects.create(team_id=self.team.pk, distinct_ids=["2"], properties={"foo": "non"}) cohort0: Cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "foo", "value": "bar", "type": "person"}]}], name="cohort0" + team=self.team, + groups=[{"properties": [{"key": "foo", "value": "bar", "type": "person"}]}], + name="cohort0", ) cohort0.calculate_people_ch(pending_version=0) @@ -668,8 +838,16 @@ def test_cohortpeople_with_valid_other_cohort_filter(self): @snapshot_clickhouse_insert_cohortpeople_queries def test_cohortpeople_with_not_in_cohort_operator(self): - _create_person(distinct_ids=["1"], team_id=self.team.pk, properties={"$some_prop": "something1"}) - _create_person(distinct_ids=["2"], team_id=self.team.pk, properties={"$some_prop": "something2"}) + _create_person( + distinct_ids=["1"], + team_id=self.team.pk, + properties={"$some_prop": "something1"}, + ) + _create_person( + distinct_ids=["2"], + team_id=self.team.pk, + properties={"$some_prop": "something2"}, + ) _create_event( event="$pageview", @@ -725,25 +903,45 @@ def test_cohortpeople_with_not_in_cohort_operator(self): cohort1.calculate_people_ch(pending_version=0) with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): - filter = Filter( - data={"properties": [{"key": "id", "value": cohort1.pk, "type": "precalculated-cohort"}]}, + data={ + "properties": [ + { + "key": "id", + "value": cohort1.pk, + 
"type": "precalculated-cohort", + } + ] + }, team=self.team, ) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid, distinct_id FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) self.assertEqual(result[0][1], "2") # distinct_id '2' is the one in cohort @snapshot_clickhouse_queries def test_cohortpeople_with_not_in_cohort_operator_and_no_precalculation(self): - _create_person(distinct_ids=["1"], team_id=self.team.pk, properties={"$some_prop": "something1"}) - _create_person(distinct_ids=["2"], team_id=self.team.pk, properties={"$some_prop": "something2"}) + _create_person( + distinct_ids=["1"], + team_id=self.team.pk, + properties={"$some_prop": "something1"}, + ) + _create_person( + distinct_ids=["2"], + team_id=self.team.pk, + properties={"$some_prop": "something2"}, + ) _create_event( event="$pageview", @@ -795,21 +993,37 @@ def test_cohortpeople_with_not_in_cohort_operator_and_no_precalculation(self): name="cohort1", ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid, distinct_id FROM events WHERE team_id = %(team_id)s {}".format(query) 
self.assertIn("\nFROM person_distinct_id2\n", final_query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) self.assertEqual(len(result), 1) self.assertEqual(result[0][1], "2") # distinct_id '2' is the one in cohort @snapshot_clickhouse_insert_cohortpeople_queries def test_cohortpeople_with_not_in_cohort_operator_for_behavioural_cohorts(self): - _create_person(distinct_ids=["1"], team_id=self.team.pk, properties={"$some_prop": "something"}) - _create_person(distinct_ids=["2"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) + _create_person( + distinct_ids=["2"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( event="signup", @@ -885,14 +1099,21 @@ def test_cohortpeople_with_not_in_cohort_operator_for_behavioural_cohorts(self): cohort1.calculate_people_ch(pending_version=0) with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): - - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) query, params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) final_query = "SELECT uuid, distinct_id FROM events WHERE team_id = %(team_id)s {}".format(query) - result = sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + result = sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) 
self.assertEqual(len(result), 1) self.assertEqual(result[0][1], "2") # distinct_id '2' is the one in cohort @@ -902,7 +1123,9 @@ def test_cohortpeople_with_nonexistent_other_cohort_filter(self): Person.objects.create(team_id=self.team.pk, distinct_ids=["2"], properties={"foo": "non"}) cohort1: Cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "id", "type": "cohort", "value": 666}]}], name="cohort1" + team=self.team, + groups=[{"properties": [{"key": "id", "type": "cohort", "value": 666}]}], + name="cohort1", ) cohort1.calculate_people_ch(pending_version=0) @@ -921,15 +1144,19 @@ def test_clickhouse_empty_query(self): self.assertFalse(Cohort.objects.get().is_calculating) def test_query_with_multiple_new_style_cohorts(self): - action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( - event="$autocapture", action=action1, url="https://posthog.com/feedback/123", url_matching=ActionStep.EXACT + event="$autocapture", + action=action1, + url="https://posthog.com/feedback/123", + url_matching=ActionStep.EXACT, ) # satiesfies all conditions p1 = Person.objects.create( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -948,7 +1175,9 @@ def test_query_with_multiple_new_style_cohorts(self): # doesn't satisfy action Person.objects.create( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -967,7 +1196,9 @@ def test_query_with_multiple_new_style_cohorts(self): # satisfies special condition (not pushed down person property in OR group) p3 = Person.objects.create( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": 
"special", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "special", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -991,7 +1222,11 @@ def test_query_with_multiple_new_style_cohorts(self): "value": "performed_event_first_time", "type": "behavioral", }, - {"key": "email", "value": "test@posthog.com", "type": "person"}, # this is pushed down + { + "key": "email", + "value": "test@posthog.com", + "type": "person", + }, # this is pushed down ], } }, @@ -1023,10 +1258,17 @@ def test_query_with_multiple_new_style_cohorts(self): "value": "performed_event", "type": "behavioral", }, - {"key": "name", "value": "special", "type": "person"}, # this is NOT pushed down + { + "key": "name", + "value": "special", + "type": "person", + }, # this is NOT pushed down ], }, - {"type": "AND", "values": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, + { + "type": "AND", + "values": [{"key": "id", "value": cohort2.pk, "type": "cohort"}], + }, ], } }, @@ -1039,9 +1281,21 @@ def test_query_with_multiple_new_style_cohorts(self): self.assertCountEqual([p1.uuid, p3.uuid], [r[0] for r in result]) def test_update_cohort(self): - Person.objects.create(team_id=self.team.pk, distinct_ids=["1"], properties={"$some_prop": "something"}) - Person.objects.create(team_id=self.team.pk, distinct_ids=["2"], properties={"$another_prop": "something"}) - Person.objects.create(team_id=self.team.pk, distinct_ids=["3"], properties={"$another_prop": "something"}) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["1"], + properties={"$some_prop": "something"}, + ) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["2"], + properties={"$another_prop": "something"}, + ) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["3"], + properties={"$another_prop": "something"}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -1072,9 +1326,21 @@ def test_update_cohort(self): 
self.assertEqual(len(results), 1) def test_cohort_versioning(self): - Person.objects.create(team_id=self.team.pk, distinct_ids=["1"], properties={"$some_prop": "something"}) - Person.objects.create(team_id=self.team.pk, distinct_ids=["2"], properties={"$another_prop": "something"}) - Person.objects.create(team_id=self.team.pk, distinct_ids=["3"], properties={"$another_prop": "something"}) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["1"], + properties={"$some_prop": "something"}, + ) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["2"], + properties={"$another_prop": "something"}, + ) + Person.objects.create( + team_id=self.team.pk, + distinct_ids=["3"], + properties={"$another_prop": "something"}, + ) # start the cohort at some later version cohort1 = Cohort.objects.create( diff --git a/ee/clickhouse/models/test/test_dead_letter_queue.py b/ee/clickhouse/models/test/test_dead_letter_queue.py index b5acc8ccfe880..220d7ada323b6 100644 --- a/ee/clickhouse/models/test/test_dead_letter_queue.py +++ b/ee/clickhouse/models/test/test_dead_letter_queue.py @@ -4,7 +4,9 @@ from kafka import KafkaProducer -from ee.clickhouse.models.test.utils.util import delay_until_clickhouse_consumes_from_kafka +from ee.clickhouse.models.test.utils.util import ( + delay_until_clickhouse_consumes_from_kafka, +) from posthog.clickhouse.dead_letter_queue import ( DEAD_LETTER_QUEUE_TABLE, DEAD_LETTER_QUEUE_TABLE_MV_SQL, @@ -98,7 +100,10 @@ def test_kafka_insert(self): kafka_producer = KafkaProducer(bootstrap_servers=KAFKA_HOSTS) - kafka_producer.send(topic=KAFKA_DEAD_LETTER_QUEUE, value=json.dumps(inserted_dlq_event).encode("utf-8")) + kafka_producer.send( + topic=KAFKA_DEAD_LETTER_QUEUE, + value=json.dumps(inserted_dlq_event).encode("utf-8"), + ) delay_until_clickhouse_consumes_from_kafka(DEAD_LETTER_QUEUE_TABLE, row_count_before_insert + 1) diff --git a/ee/clickhouse/models/test/test_filters.py b/ee/clickhouse/models/test/test_filters.py index 
1cb32f3a27e19..f388e383f1c12 100644 --- a/ee/clickhouse/models/test/test_filters.py +++ b/ee/clickhouse/models/test/test_filters.py @@ -27,7 +27,8 @@ def _filter_events(filter: Filter, team: Team, order_by: Optional[str] = None): events = query_with_columns( GET_EVENTS_WITH_PROPERTIES.format( - filters=prop_filters, order_by="ORDER BY {}".format(order_by) if order_by else "" + filters=prop_filters, + order_by="ORDER BY {}".format(order_by) if order_by else "", ), params, ) @@ -56,13 +57,29 @@ class TestFilters(PGTestFilters): def test_simplify_cohorts(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) cohort.calculate_people_ch(pending_version=0) filter = Filter(data={"properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]}) filter_with_groups = Filter( - data={"properties": {"type": "AND", "values": [{"type": "cohort", "key": "id", "value": cohort.pk}]}} + data={ + "properties": { + "type": "AND", + "values": [{"type": "cohort", "key": "id", "value": cohort.pk}], + } + } ) self.assertEqual( @@ -70,7 +87,14 @@ def test_simplify_cohorts(self): { "properties": { "type": "AND", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": ".com"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } + ], } }, ) @@ -80,7 +104,14 @@ def test_simplify_cohorts(self): { "properties": { "type": "AND", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": ".com"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } + ], } }, ) @@ -91,7 +122,13 @@ def test_simplify_cohorts(self): { "properties": { "type": "AND", - "values": [{"key": "id", "value": cohort.pk, 
"type": "precalculated-cohort"}], + "values": [ + { + "key": "id", + "value": cohort.pk, + "type": "precalculated-cohort", + } + ], } }, ) @@ -101,7 +138,13 @@ def test_simplify_cohorts(self): { "properties": { "type": "AND", - "values": [{"key": "id", "value": cohort.pk, "type": "precalculated-cohort"}], + "values": [ + { + "key": "id", + "value": cohort.pk, + "type": "precalculated-cohort", + } + ], } }, ) @@ -112,7 +155,12 @@ def test_simplify_static_cohort(self): self.assertEqual( filter.simplify(self.team).properties_to_dict(), - {"properties": {"type": "AND", "values": [{"type": "static-cohort", "key": "id", "value": cohort.pk}]}}, + { + "properties": { + "type": "AND", + "values": [{"type": "static-cohort", "key": "id", "value": cohort.pk}], + } + }, ) def test_simplify_hasdone_cohort(self): @@ -121,7 +169,12 @@ def test_simplify_hasdone_cohort(self): self.assertEqual( filter.simplify(self.team).properties_to_dict(), - {"properties": {"type": "AND", "values": [{"type": "cohort", "key": "id", "value": cohort.pk}]}}, + { + "properties": { + "type": "AND", + "values": [{"type": "cohort", "key": "id", "value": cohort.pk}], + } + }, ) def test_simplify_multi_group_cohort(self): @@ -145,11 +198,23 @@ def test_simplify_multi_group_cohort(self): "values": [ { "type": "AND", - "values": [{"type": "person", "key": "$some_prop", "value": "something"}], + "values": [ + { + "type": "person", + "key": "$some_prop", + "value": "something", + } + ], }, { "type": "AND", - "values": [{"type": "person", "key": "$another_prop", "value": "something"}], + "values": [ + { + "type": "person", + "key": "$another_prop", + "value": "something", + } + ], }, ], } @@ -161,10 +226,22 @@ def test_simplify_multi_group_cohort(self): def test_recursive_cohort(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": 
"icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) recursive_cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]}] + team=self.team, + groups=[{"properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]}], ) filter = Filter(data={"properties": [{"type": "cohort", "key": "id", "value": recursive_cohort.pk}]}) @@ -173,7 +250,14 @@ def test_recursive_cohort(self): { "properties": { "type": "AND", - "values": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "values": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], } }, ) @@ -181,7 +265,18 @@ def test_recursive_cohort(self): def test_simplify_cohorts_with_recursive_negation(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) recursive_cohort = Cohort.objects.create( team=self.team, @@ -189,13 +284,27 @@ def test_simplify_cohorts_with_recursive_negation(self): { "properties": [ {"key": "email", "value": "xyz", "type": "person"}, - {"type": "cohort", "key": "id", "value": cohort.pk, "negation": True}, + { + "type": "cohort", + "key": "id", + "value": cohort.pk, + "negation": True, + }, ] } ], ) filter = Filter( - data={"properties": [{"type": "cohort", "key": "id", "value": recursive_cohort.pk, "negation": True}]} + data={ + "properties": [ + { + "type": "cohort", + "key": "id", + "value": recursive_cohort.pk, + "negation": True, + } + ] + } ) self.assertEqual( @@ -203,7 +312,14 @@ def test_simplify_cohorts_with_recursive_negation(self): { "properties": { "type": "AND", - "values": [{"type": "cohort", "key": "id", "value": recursive_cohort.pk, "negation": True}], + "values": [ + { + "type": 
"cohort", + "key": "id", + "value": recursive_cohort.pk, + "negation": True, + } + ], } }, ) @@ -211,16 +327,45 @@ def test_simplify_cohorts_with_recursive_negation(self): def test_simplify_cohorts_with_simple_negation(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], + ) + filter = Filter( + data={ + "properties": [ + { + "type": "cohort", + "key": "id", + "value": cohort.pk, + "negation": True, + } + ] + } ) - filter = Filter(data={"properties": [{"type": "cohort", "key": "id", "value": cohort.pk, "negation": True}]}) self.assertEqual( filter.simplify(self.team).properties_to_dict(), { "properties": { "type": "AND", - "values": [{"type": "cohort", "key": "id", "value": cohort.pk, "negation": True}], + "values": [ + { + "type": "cohort", + "key": "id", + "value": cohort.pk, + "negation": True, + } + ], } }, ) @@ -230,16 +375,39 @@ def test_simplify_no_such_cohort(self): self.assertEqual( filter.simplify(self.team).properties_to_dict(), - {"properties": {"type": "AND", "values": [{"type": "cohort", "key": "id", "value": 555_555}]}}, + { + "properties": { + "type": "AND", + "values": [{"type": "cohort", "key": "id", "value": 555_555}], + } + }, ) def test_simplify_entities(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) filter = Filter( - data={"events": [{"id": "$pageview", "properties": [{"type": "cohort", "key": "id", "value": cohort.pk}]}]} + data={ + "events": [ + { + "id": "$pageview", + "properties": [{"type": "cohort", "key": "id", "value": cohort.pk}], + } + ] + 
} ) self.assertEqual( @@ -258,7 +426,14 @@ def test_simplify_entities(self): "name": "$pageview", "properties": { "type": "AND", - "values": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + "values": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], }, } ] @@ -266,7 +441,17 @@ def test_simplify_entities(self): ) def test_simplify_entities_with_group_math(self): - filter = Filter(data={"events": [{"id": "$pageview", "math": "unique_group", "math_group_type_index": 2}]}) + filter = Filter( + data={ + "events": [ + { + "id": "$pageview", + "math": "unique_group", + "math_group_type_index": 2, + } + ] + } + ) self.assertEqual( filter.simplify(self.team).entities_to_dict(), @@ -284,7 +469,14 @@ def test_simplify_entities_with_group_math(self): "name": "$pageview", "properties": { "type": "AND", - "values": [{"key": "$group_2", "operator": "is_not", "value": "", "type": "event"}], + "values": [ + { + "key": "$group_2", + "operator": "is_not", + "value": "", + "type": "event", + } + ], }, } ] @@ -299,7 +491,14 @@ def test_simplify_when_aggregating_by_group(self): { "properties": { "type": "AND", - "values": [{"key": "$group_0", "operator": "is_not", "value": "", "type": "event"}], + "values": [ + { + "key": "$group_0", + "operator": "is_not", + "value": "", + "type": "event", + } + ], } }, ) @@ -312,7 +511,14 @@ def test_simplify_funnel_entities_when_aggregating_by_group(self): { "properties": { "type": "AND", - "values": [{"key": "$group_2", "operator": "is_not", "value": "", "type": "event"}], + "values": [ + { + "key": "$group_2", + "operator": "is_not", + "value": "", + "type": "event", + } + ], } }, ) @@ -322,13 +528,22 @@ class TestFiltering(ClickhouseTestMixin, property_to_Q_test_factory(_filter_pers def test_simple(self): _create_event(team=self.team, distinct_id="test", event="$pageview") _create_event( - team=self.team, distinct_id="test", event="$pageview", 
properties={"$current_url": 1} + team=self.team, + distinct_id="test", + event="$pageview", + properties={"$current_url": 1}, ) # test for type incompatibility _create_event( - team=self.team, distinct_id="test", event="$pageview", properties={"$current_url": {"bla": "bla"}} + team=self.team, + distinct_id="test", + event="$pageview", + properties={"$current_url": {"bla": "bla"}}, ) # test for type incompatibility _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) filter = Filter(data={"properties": {"$current_url": "https://whatever.com"}}) events = _filter_events(filter, self.team) @@ -337,25 +552,52 @@ def test_simple(self): def test_multiple_equality(self): _create_event(team=self.team, distinct_id="test", event="$pageview") _create_event( - team=self.team, distinct_id="test", event="$pageview", properties={"$current_url": 1} + team=self.team, + distinct_id="test", + event="$pageview", + properties={"$current_url": 1}, ) # test for type incompatibility _create_event( - team=self.team, distinct_id="test", event="$pageview", properties={"$current_url": {"bla": "bla"}} + team=self.team, + distinct_id="test", + event="$pageview", + properties={"$current_url": {"bla": "bla"}}, ) # test for type incompatibility _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://example.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://example.com"}, ) filter = Filter(data={"properties": {"$current_url": ["https://whatever.com", 
"https://example.com"]}}) events = _filter_events(filter, self.team) self.assertEqual(len(events), 2) def test_numerical(self): - event1_uuid = _create_event(team=self.team, distinct_id="test", event="$pageview", properties={"$a_number": 5}) - event2_uuid = _create_event(team=self.team, event="$pageview", distinct_id="test", properties={"$a_number": 6}) - _create_event(team=self.team, event="$pageview", distinct_id="test", properties={"$a_number": "rubbish"}) + event1_uuid = _create_event( + team=self.team, + distinct_id="test", + event="$pageview", + properties={"$a_number": 5}, + ) + event2_uuid = _create_event( + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$a_number": 6}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$a_number": "rubbish"}, + ) filter = Filter(data={"properties": {"$a_number__gt": 5}}) events = _filter_events(filter, self.team) self.assertEqual(events[0]["id"], event2_uuid) @@ -371,7 +613,10 @@ def test_numerical(self): def test_contains(self): _create_event(team=self.team, distinct_id="test", event="$pageview") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) filter = Filter(data={"properties": {"$current_url__icontains": "whatever"}}) events = _filter_events(filter, self.team) @@ -380,7 +625,10 @@ def test_contains(self): def test_regex(self): event1_uuid = _create_event(team=self.team, distinct_id="test", event="$pageview") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) filter = Filter(data={"properties": {"$current_url__regex": r"\.com$"}}) events = 
_filter_events(filter, self.team) @@ -394,7 +642,10 @@ def test_regex(self): def test_invalid_regex(self): _create_event(team=self.team, distinct_id="test", event="$pageview") _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) filter = Filter(data={"properties": {"$current_url__regex": "?*"}}) @@ -406,26 +657,44 @@ def test_invalid_regex(self): def test_is_not(self): event1_uuid = _create_event(team=self.team, distinct_id="test", event="$pageview") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://something.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://something.com"}, ) _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) filter = Filter(data={"properties": {"$current_url__is_not": "https://whatever.com"}}) events = _filter_events(filter, self.team) - self.assertEqual(sorted([events[0]["id"], events[1]["id"]]), sorted([event1_uuid, event2_uuid])) + self.assertEqual( + sorted([events[0]["id"], events[1]["id"]]), + sorted([event1_uuid, event2_uuid]), + ) self.assertEqual(len(events), 2) def test_does_not_contain(self): event1_uuid = _create_event(team=self.team, event="$pageview", distinct_id="test") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://something.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://something.com"}, ) _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": 
"https://whatever.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://whatever.com"}, ) event3_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": None} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": None}, ) filter = Filter(data={"properties": {"$current_url__not_icontains": "whatever.com"}}) events = _filter_events(filter, self.team) @@ -437,24 +706,48 @@ def test_multiple(self): team=self.team, event="$pageview", distinct_id="test", - properties={"$current_url": "https://something.com", "another_key": "value"}, + properties={ + "$current_url": "https://something.com", + "another_key": "value", + }, ) _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"$current_url": "https://something.com"} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"$current_url": "https://something.com"}, + ) + filter = Filter( + data={ + "properties": { + "$current_url__icontains": "something.com", + "another_key": "value", + } + } ) - filter = Filter(data={"properties": {"$current_url__icontains": "something.com", "another_key": "value"}}) events = _filter_events(filter, self.team) self.assertEqual(events[0]["id"], event2_uuid) self.assertEqual(len(events), 1) def test_user_properties(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"group": "some group"}) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"group": "another group"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"group": "some group"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"group": "another group"}, + ) event2_uuid = _create_event( team=self.team, distinct_id="person1", event="$pageview", - properties={"$current_url": "https://something.com", "another_key": 
"value"}, + properties={ + "$current_url": "https://something.com", + "another_key": "value", + }, ) event_p2_uuid = _create_event( team=self.team, @@ -465,12 +758,19 @@ def test_user_properties(self): # test for leakage _, _, team2 = Organization.objects.bootstrap(None) - _create_person(team_id=team2.pk, distinct_ids=["person_team_2"], properties={"group": "another group"}) + _create_person( + team_id=team2.pk, + distinct_ids=["person_team_2"], + properties={"group": "another group"}, + ) _create_event( team=team2, distinct_id="person_team_2", event="$pageview", - properties={"$current_url": "https://something.com", "another_key": "value"}, + properties={ + "$current_url": "https://something.com", + "another_key": "value", + }, ) filter = Filter(data={"properties": [{"key": "group", "value": "some group", "type": "person"}]}) @@ -479,7 +779,16 @@ def test_user_properties(self): self.assertEqual(events[0]["id"], event2_uuid) filter = Filter( - data={"properties": [{"key": "group", "operator": "is_not", "value": "some group", "type": "person"}]} + data={ + "properties": [ + { + "key": "group", + "operator": "is_not", + "value": "some group", + "type": "person", + } + ] + } ) events = _filter_events(filter=filter, team=self.team, order_by=None) self.assertEqual(events[0]["id"], event_p2_uuid) @@ -492,7 +801,10 @@ def test_user_properties_numerical(self): team=self.team, distinct_id="person1", event="$pageview", - properties={"$current_url": "https://something.com", "another_key": "value"}, + properties={ + "$current_url": "https://something.com", + "another_key": "value", + }, ) _create_event( team=self.team, @@ -515,7 +827,10 @@ def test_user_properties_numerical(self): def test_boolean_filters(self): _create_event(team=self.team, event="$pageview", distinct_id="test") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"is_first_user": True} + team=self.team, + event="$pageview", + distinct_id="test", + 
properties={"is_first_user": True}, ) filter = Filter(data={"properties": [{"key": "is_first_user", "value": "true"}]}) events = _filter_events(filter, self.team) @@ -525,10 +840,21 @@ def test_boolean_filters(self): def test_is_not_set_and_is_set(self): event1_uuid = _create_event(team=self.team, event="$pageview", distinct_id="test") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"is_first_user": True} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"is_first_user": True}, ) filter = Filter( - data={"properties": [{"key": "is_first_user", "operator": "is_not_set", "value": "is_not_set"}]} + data={ + "properties": [ + { + "key": "is_first_user", + "operator": "is_not_set", + "value": "is_not_set", + } + ] + } ) events = _filter_events(filter, self.team) self.assertEqual(events[0]["id"], event1_uuid) @@ -542,7 +868,10 @@ def test_is_not_set_and_is_set(self): def test_is_not_set_and_is_set_with_missing_value(self): event1_uuid = _create_event(team=self.team, event="$pageview", distinct_id="test") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"is_first_user": True} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"is_first_user": True}, ) filter = Filter(data={"properties": [{"key": "is_first_user", "operator": "is_not_set"}]}) events = _filter_events(filter, self.team) @@ -557,7 +886,10 @@ def test_is_not_set_and_is_set_with_missing_value(self): def test_true_false(self): _create_event(team=self.team, distinct_id="test", event="$pageview") event2_uuid = _create_event( - team=self.team, event="$pageview", distinct_id="test", properties={"is_first": True} + team=self.team, + event="$pageview", + distinct_id="test", + properties={"is_first": True}, ) filter = Filter(data={"properties": {"is_first": "true"}}) events = _filter_events(filter, self.team) @@ -570,7 +902,12 @@ def test_true_false(self): def 
test_is_not_true_false(self): event_uuid = _create_event(team=self.team, distinct_id="test", event="$pageview") - _create_event(team=self.team, event="$pageview", distinct_id="test", properties={"is_first": True}) + _create_event( + team=self.team, + event="$pageview", + distinct_id="test", + properties={"is_first": True}, + ) filter = Filter(data={"properties": [{"key": "is_first", "value": "true", "operator": "is_not"}]}) events = _filter_events(filter, self.team) self.assertEqual(events[0]["id"], event_uuid) @@ -590,7 +927,11 @@ def test_json_object(self): filter = Filter( data={ "properties": [ - {"key": "name", "value": json.dumps({"first_name": "Mary", "last_name": "Smith"}), "type": "person"} + { + "key": "name", + "value": json.dumps({"first_name": "Mary", "last_name": "Smith"}), + "type": "person", + } ] } ) @@ -603,7 +944,10 @@ def test_element_selectors(self): team=self.team, event="$autocapture", distinct_id="distinct_id", - elements=[Element.objects.create(tag_name="a"), Element.objects.create(tag_name="div")], + elements=[ + Element.objects.create(tag_name="a"), + Element.objects.create(tag_name="div"), + ], ) _create_event(team=self.team, event="$autocapture", distinct_id="distinct_id") filter = Filter(data={"properties": [{"key": "selector", "value": "div > a", "type": "element"}]}) @@ -615,7 +959,10 @@ def test_element_filter(self): team=self.team, event="$autocapture", distinct_id="distinct_id", - elements=[Element.objects.create(tag_name="a", text="some text"), Element.objects.create(tag_name="div")], + elements=[ + Element.objects.create(tag_name="a", text="some text"), + Element.objects.create(tag_name="div"), + ], ) _create_event( @@ -630,7 +977,15 @@ def test_element_filter(self): _create_event(team=self.team, event="$autocapture", distinct_id="distinct_id") filter = Filter( - data={"properties": [{"key": "text", "value": ["some text", "some other text"], "type": "element"}]} + data={ + "properties": [ + { + "key": "text", + "value": ["some 
text", "some other text"], + "type": "element", + } + ] + } ) events = _filter_events(filter=filter, team=self.team) self.assertEqual(len(events), 2) @@ -640,15 +995,31 @@ def test_element_filter(self): self.assertEqual(len(events_response_2), 1) def test_filter_out_team_members(self): - _create_person(team_id=self.team.pk, distinct_ids=["team_member"], properties={"email": "test@posthog.com"}) - _create_person(team_id=self.team.pk, distinct_ids=["random_user"], properties={"email": "test@gmail.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["team_member"], + properties={"email": "test@posthog.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["random_user"], + properties={"email": "test@gmail.com"}, + ) self.team.test_account_filters = [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ] self.team.save() _create_event(team=self.team, distinct_id="team_member", event="$pageview") _create_event(team=self.team, distinct_id="random_user", event="$pageview") - filter = Filter(data={FILTER_TEST_ACCOUNTS: True, "events": [{"id": "$pageview"}]}, team=self.team) + filter = Filter( + data={FILTER_TEST_ACCOUNTS: True, "events": [{"id": "$pageview"}]}, + team=self.team, + ) events = _filter_events(filter=filter, team=self.team) self.assertEqual(len(events), 1) @@ -680,7 +1051,12 @@ def test_filter_out_team_members_with_grouped_properties(self): ) self.team.test_account_filters = [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ] self.team.save() @@ -689,19 +1065,54 @@ def test_filter_out_team_members_with_grouped_properties(self): create_people=False, events_by_person={ "person1": [ - {"event": "$pageview", "properties": {"key": "val", "$browser": 
"Safari", "$browser_version": 14}} + { + "event": "$pageview", + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, + } ], "person2": [ - {"event": "$pageview", "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14}} + { + "event": "$pageview", + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, + } ], "person3": [ - {"event": "$pageview", "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14}} + { + "event": "$pageview", + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, + } ], "person4": [ - {"event": "$pageview", "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14}} + { + "event": "$pageview", + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, + } ], "person5": [ - {"event": "$pageview", "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14}} + { + "event": "$pageview", + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, + } ], }, ) @@ -716,16 +1127,36 @@ def test_filter_out_team_members_with_grouped_properties(self): { "type": "OR", "values": [ - {"key": "age", "value": "10", "operator": "exact", "type": "person"}, - {"key": "age", "value": "20", "operator": "exact", "type": "person"}, + { + "key": "age", + "value": "10", + "operator": "exact", + "type": "person", + }, + { + "key": "age", + "value": "20", + "operator": "exact", + "type": "person", + }, # choose person 1 and 2 ], }, { "type": "AND", "values": [ - {"key": "$browser", "value": "Safari", "operator": "exact", "type": "event"}, - {"key": "age", "value": "50", "operator": "exact", "type": "person"}, + { + "key": "$browser", + "value": "Safari", + "operator": "exact", + "type": "event", + }, + { + "key": "age", + "value": "50", + "operator": "exact", + "type": "person", + }, # choose person 5 ], }, @@ -741,7 +1172,9 @@ def 
test_filter_out_team_members_with_grouped_properties(self): def test_person_cohort_properties(self): person1_distinct_id = "person1" Person.objects.create( - team=self.team, distinct_ids=[person1_distinct_id], properties={"$some_prop": "something"} + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"$some_prop": "something"}, ) cohort1 = Cohort.objects.create( @@ -752,17 +1185,31 @@ def test_person_cohort_properties(self): person2_distinct_id = "person2" Person.objects.create( - team=self.team, distinct_ids=[person2_distinct_id], properties={"$some_prop": "different"} + team=self.team, + distinct_ids=[person2_distinct_id], + properties={"$some_prop": "different"}, ) cohort2 = Cohort.objects.create( team=self.team, groups=[ - {"properties": [{"type": "person", "key": "$some_prop", "value": "something", "operator": "is_not"}]} + { + "properties": [ + { + "type": "person", + "key": "$some_prop", + "value": "something", + "operator": "is_not", + } + ] + } ], name="cohort2", ) - filter = Filter(data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort"}]}, + team=self.team, + ) prop_clause, prop_clause_params = parse_prop_grouped_clauses( property_group=filter.property_groups, @@ -776,13 +1223,16 @@ def test_person_cohort_properties(self): prop_clause=prop_clause ) # get distinct_id column of result - result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values})[0][ + result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[ 0 - ] + ][0] self.assertEqual(result, person1_distinct_id) # test cohort2 with negation - filter = Filter(data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, team=self.team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort"}]}, + 
team=self.team, + ) prop_clause, prop_clause_params = parse_prop_grouped_clauses( property_group=filter.property_groups, has_person_id_joined=False, @@ -795,9 +1245,9 @@ def test_person_cohort_properties(self): prop_clause=prop_clause ) # get distinct_id column of result - result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values})[0][ + result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[ 0 - ] + ][0] self.assertEqual(result, person2_distinct_id) @@ -813,7 +1263,12 @@ def test_simplify_nested(self): { "type": "AND", "values": [ - {"type": "person", "key": "email", "operator": "icontains", "value": ".com"} + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } ], } ], @@ -821,8 +1276,18 @@ def test_simplify_nested(self): { "type": "AND", "values": [ - {"type": "person", "key": "email", "operator": "icontains", "value": "arg2"}, - {"type": "person", "key": "email", "operator": "icontains", "value": "arg3"}, + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": "arg2", + }, + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": "arg3", + }, ], }, ], @@ -840,7 +1305,14 @@ def test_simplify_nested(self): "values": [ { "type": "OR", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": ".com"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } + ], }, { "type": "AND", @@ -848,13 +1320,23 @@ def test_simplify_nested(self): { "type": "AND", "values": [ - {"type": "person", "key": "email", "operator": "icontains", "value": "arg2"} + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": "arg2", + } ], }, { "type": "AND", "values": [ - {"type": "person", "key": "email", "operator": "icontains", "value": "arg3"} + { + "type": "person", + "key": "email", + "operator": 
"icontains", + "value": "arg3", + } ], }, ], @@ -875,14 +1357,26 @@ def test_simplify_nested(self): { "type": "AND", "values": [ - {"type": "person", "key": "email", "operator": "icontains", "value": ".com"} + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } ], } ], }, { "type": "AND", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": "arg2"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": "arg2", + } + ], }, ], } @@ -897,11 +1391,25 @@ def test_simplify_nested(self): "values": [ { "type": "OR", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": ".com"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": ".com", + } + ], }, { "type": "AND", - "values": [{"type": "person", "key": "email", "operator": "icontains", "value": "arg2"}], + "values": [ + { + "type": "person", + "key": "email", + "operator": "icontains", + "value": "arg2", + } + ], }, ], } diff --git a/ee/clickhouse/models/test/test_property.py b/ee/clickhouse/models/test/test_property.py index 4c7082adc8ba5..f55578878f91a 100644 --- a/ee/clickhouse/models/test/test_property.py +++ b/ee/clickhouse/models/test/test_property.py @@ -12,7 +12,10 @@ from posthog.models.cohort import Cohort from posthog.models.element import Element from posthog.models.filters import Filter -from posthog.models.instance_setting import get_instance_setting, override_instance_config +from posthog.models.instance_setting import ( + get_instance_setting, + override_instance_config, +) from posthog.models.organization import Organization from posthog.models.property import Property, TableWithProperties from posthog.models.property.util import ( @@ -49,25 +52,48 @@ def _run_query(self, filter: Filter, **kwargs) -> List: **kwargs, ) final_query = "SELECT uuid FROM events WHERE team_id = %(team_id)s {}".format(query) - return 
sync_execute(final_query, {**params, **filter.hogql_context.values, "team_id": self.team.pk}) + return sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) def test_prop_person(self): - _create_person( - distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"email": "another@posthog.com"} + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"email": "another@posthog.com"}, ) - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"email": "test@posthog.com"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"email": "test@posthog.com"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) filter = Filter(data={"properties": [{"key": "email", "value": "test@posthog.com", "type": "person"}]}) self.assertEqual(len(self._run_query(filter)), 1) def test_prop_event(self): - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"attr": "some_other_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"attr": "some_other_val"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"attr": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"attr": "some_val"}, + ) filter_exact = Filter(data={"properties": [{"key": "attr", "value": "some_val"}]}) self.assertEqual(len(self._run_query(filter_exact)), 1) @@ -90,8 +116,20 @@ def test_prop_element(self): distinct_id="whatever", properties={"attr": "some_other_val"}, elements=[ - Element(tag_name="a", href="/a-url", attr_class=["small"], text="bla bla", nth_child=1, nth_of_type=0), - Element(tag_name="button", attr_class=["btn", "btn-primary"], 
nth_child=0, nth_of_type=0), + Element( + tag_name="a", + href="/a-url", + attr_class=["small"], + text="bla bla", + nth_child=1, + nth_of_type=0, + ), + Element( + tag_name="button", + attr_class=["btn", "btn-primary"], + nth_child=0, + nth_of_type=0, + ), Element(tag_name="div", nth_child=0, nth_of_type=0), Element(tag_name="label", nth_child=0, nth_of_type=0, attr_id="nested"), ], @@ -111,7 +149,12 @@ def test_prop_element(self): nth_child=1, nth_of_type=0, ), - Element(tag_name="button", attr_class=["btn", "btn-secondary"], nth_child=0, nth_of_type=0), + Element( + tag_name="button", + attr_class=["btn", "btn-secondary"], + nth_child=0, + nth_of_type=0, + ), Element(tag_name="div", nth_child=0, nth_of_type=0), Element(tag_name="img", nth_child=0, nth_of_type=0, attr_id="nested"), ], @@ -122,32 +165,69 @@ def test_prop_element(self): distinct_id="whatever", elements=[ Element(tag_name="a", href="/789", nth_child=0, nth_of_type=0), - Element(tag_name="button", attr_class=["btn", "btn-tertiary"], nth_child=0, nth_of_type=0), + Element( + tag_name="button", + attr_class=["btn", "btn-tertiary"], + nth_child=0, + nth_of_type=0, + ), ], ) # selector filter = Filter( - data={"properties": [{"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "selector", + "value": [".btn"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 3) filter = Filter( - data={"properties": [{"key": "selector", "value": ".btn", "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "selector", + "value": ".btn", + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 3) filter = Filter( data={ - "properties": [{"key": "selector", "value": [".btn-primary"], "operator": "exact", "type": "element"}] + "properties": [ + { + "key": "selector", + "value": [".btn-primary"], + "operator": "exact", + "type": 
"element", + } + ] } ) self.assertEqual(len(self._run_query(filter)), 1) filter = Filter( data={ - "properties": [{"key": "selector", "value": [".btn-secondary"], "operator": "exact", "type": "element"}] + "properties": [ + { + "key": "selector", + "value": [".btn-secondary"], + "operator": "exact", + "type": "element", + } + ] } ) self.assertEqual(len(self._run_query(filter)), 1) @@ -167,40 +247,101 @@ def test_prop_element(self): self.assertEqual(len(self._run_query(filter)), 2) filter_selector_exact_empty = Filter( - data={"properties": [{"key": "selector", "value": [], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "selector", + "value": [], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_selector_exact_empty)), 0) filter_selector_is_not_empty = Filter( - data={"properties": [{"key": "selector", "value": [], "operator": "is_not", "type": "element"}]} + data={ + "properties": [ + { + "key": "selector", + "value": [], + "operator": "is_not", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_selector_is_not_empty)), 3) # tag_name filter = Filter( - data={"properties": [{"key": "tag_name", "value": ["div"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "tag_name", + "value": ["div"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 2) filter = Filter( - data={"properties": [{"key": "tag_name", "value": "div", "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "tag_name", + "value": "div", + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 2) filter = Filter( - data={"properties": [{"key": "tag_name", "value": ["img"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "tag_name", + "value": ["img"], + "operator": "exact", + "type": "element", + 
} + ] + } ) self.assertEqual(len(self._run_query(filter)), 1) filter = Filter( - data={"properties": [{"key": "tag_name", "value": ["label"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "tag_name", + "value": ["label"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 1) filter = Filter( data={ - "properties": [{"key": "tag_name", "value": ["img", "label"], "operator": "exact", "type": "element"}] + "properties": [ + { + "key": "tag_name", + "value": ["img", "label"], + "operator": "exact", + "type": "element", + } + ] } ) self.assertEqual(len(self._run_query(filter)), 2) @@ -208,12 +349,30 @@ def test_prop_element(self): # href/text filter_href_exact = Filter( - data={"properties": [{"key": "href", "value": ["/a-url"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": ["/a-url"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_exact)), 2) filter_href_exact_double = Filter( - data={"properties": [{"key": "href", "value": ["/a-url", "/789"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": ["/a-url", "/789"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_exact_double)), 3) @@ -223,17 +382,44 @@ def test_prop_element(self): self.assertEqual(len(self._run_query(filter_href_exact_empty)), 0) filter_href_is_not = Filter( - data={"properties": [{"key": "href", "value": ["/a-url"], "operator": "is_not", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": ["/a-url"], + "operator": "is_not", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_is_not)), 1) filter_href_is_not_double = Filter( - data={"properties": [{"key": "href", "value": ["/a-url", "/789"], "operator": "is_not", "type": "element"}]} + 
data={ + "properties": [ + { + "key": "href", + "value": ["/a-url", "/789"], + "operator": "is_not", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_is_not_double)), 0) filter_href_is_not_empty = Filter( - data={"properties": [{"key": "href", "value": [], "operator": "is_not", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": [], + "operator": "is_not", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_is_not_empty)), 3) @@ -241,39 +427,98 @@ def test_prop_element(self): data={ "properties": [ {"key": "href", "value": ["/a-url"], "type": "element"}, - {"key": "tag_name", "value": ["marquee"], "operator": "is_not", "type": "element"}, + { + "key": "tag_name", + "value": ["marquee"], + "operator": "is_not", + "type": "element", + }, ] } ) self.assertEqual(len(self._run_query(filter_href_exact_with_tag_name_is_not)), 2) filter_href_icontains = Filter( - data={"properties": [{"key": "href", "value": ["UrL"], "operator": "icontains", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": ["UrL"], + "operator": "icontains", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_icontains)), 2) filter_href_regex = Filter( - data={"properties": [{"key": "href", "value": "/a-.+", "operator": "regex", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": "/a-.+", + "operator": "regex", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_regex)), 2) filter_href_not_regex = Filter( - data={"properties": [{"key": "href", "value": r"/\d+", "operator": "not_regex", "type": "element"}]} + data={ + "properties": [ + { + "key": "href", + "value": r"/\d+", + "operator": "not_regex", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_href_not_regex)), 2) filter_text_icontains_with_doublequote = Filter( - data={"properties": [{"key": "text", "value": 
'bla"bla', "operator": "icontains", "type": "element"}]} + data={ + "properties": [ + { + "key": "text", + "value": 'bla"bla', + "operator": "icontains", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_text_icontains_with_doublequote)), 1) filter_text_is_set = Filter( - data={"properties": [{"key": "text", "value": "is_set", "operator": "is_set", "type": "element"}]} + data={ + "properties": [ + { + "key": "text", + "value": "is_set", + "operator": "is_set", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_text_is_set)), 2) filter_text_is_not_set = Filter( - data={"properties": [{"key": "text", "value": "is_not_set", "operator": "is_not_set", "type": "element"}]} + data={ + "properties": [ + { + "key": "text", + "value": "is_not_set", + "operator": "is_not_set", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter_text_is_not_set)), 1) @@ -284,22 +529,56 @@ def test_prop_element_with_space(self): distinct_id="whatever", elements=[ Element(tag_name="a", href="/789", nth_child=0, nth_of_type=0), - Element(tag_name="button", attr_class=["btn space", "btn-tertiary"], nth_child=0, nth_of_type=0), + Element( + tag_name="button", + attr_class=["btn space", "btn-tertiary"], + nth_child=0, + nth_of_type=0, + ), ], ) # selector filter = Filter( - data={"properties": [{"key": "selector", "value": ["button"], "operator": "exact", "type": "element"}]} + data={ + "properties": [ + { + "key": "selector", + "value": ["button"], + "operator": "exact", + "type": "element", + } + ] + } ) self.assertEqual(len(self._run_query(filter)), 1) def test_prop_ints_saved_as_strings(self): - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": "0"}) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": "2"}) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 2}) - 
_create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": "string"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": "0"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": "2"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 2}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": "string"}, + ) filter = Filter(data={"properties": [{"key": "test_prop", "value": "2"}]}) self.assertEqual(len(self._run_query(filter)), 2) @@ -320,10 +599,30 @@ def test_prop_ints_saved_as_strings(self): self.assertEqual(len(self._run_query(filter)), 3) def test_prop_decimals(self): - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 1.4}) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 1.3}) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 2}) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 2.5}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 1.4}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 1.3}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 2}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 2.5}, + ) filter = Filter(data={"properties": [{"key": "test_prop", "value": 1.5}]}) self.assertEqual(len(self._run_query(filter)), 0) @@ -339,14 +638,26 @@ def test_prop_decimals(self): @snapshot_clickhouse_queries def test_parse_groups(self): - 
_create_event( - event="$pageview", team=self.team, distinct_id="some_id", properties={"attr_1": "val_1", "attr_2": "val_2"} + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr_1": "val_1", "attr_2": "val_2"}, ) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr_1": "val_2"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr_1": "val_2"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr_1": "val_3"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_other_id", + properties={"attr_1": "val_3"}, + ) filter = Filter( data={ @@ -355,7 +666,10 @@ def test_parse_groups(self): "values": [ { "type": "AND", - "values": [{"key": "attr_1", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr_1", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], }, {"type": "OR", "values": [{"key": "attr_1", "value": "val_2"}]}, ], @@ -366,7 +680,6 @@ def test_parse_groups(self): self.assertEqual(len(self._run_query(filter)), 2) def test_parse_groups_invalid_type(self): - filter = Filter( data={ "properties": { @@ -374,7 +687,10 @@ def test_parse_groups_invalid_type(self): "values": [ { "type": "AND", - "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], }, {"type": "XOR", "values": [{"key": "attr", "value": "val_2"}]}, ], @@ -386,19 +702,42 @@ def test_parse_groups_invalid_type(self): @snapshot_clickhouse_queries def test_parse_groups_persons(self): - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"email": "1@posthog.com"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"email": "1@posthog.com"}, + ) - _create_person(distinct_ids=["some_other_id"], 
team_id=self.team.pk, properties={"email": "2@posthog.com"}) _create_person( - distinct_ids=["some_other_random_id"], team_id=self.team.pk, properties={"email": "X@posthog.com"} + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"email": "2@posthog.com"}, + ) + _create_person( + distinct_ids=["some_other_random_id"], + team_id=self.team.pk, + properties={"email": "X@posthog.com"}, ) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "val_1"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "val_1"}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "val_3"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_other_id", + properties={"attr": "val_3"}, + ) _create_event( - event="$pageview", team=self.team, distinct_id="some_other_random_id", properties={"attr": "val_3"} + event="$pageview", + team=self.team, + distinct_id="some_other_random_id", + properties={"attr": "val_3"}, ) filter = Filter( @@ -406,8 +745,26 @@ def test_parse_groups_persons(self): "properties": { "type": "OR", "values": [ - {"type": "OR", "values": [{"key": "email", "type": "person", "value": "1@posthog.com"}]}, - {"type": "OR", "values": [{"key": "email", "type": "person", "value": "2@posthog.com"}]}, + { + "type": "OR", + "values": [ + { + "key": "email", + "type": "person", + "value": "1@posthog.com", + } + ], + }, + { + "type": "OR", + "values": [ + { + "key": "email", + "type": "person", + "value": "2@posthog.com", + } + ], + }, ], } } @@ -441,14 +798,25 @@ def _run_query(self, filter: Filter, join_person_tables=False) -> List: final_query = f"SELECT uuid FROM events {joins} WHERE team_id = %(team_id)s {query}" # Make sure we don't accidentally use json on the properties field self.assertNotIn("json", final_query.lower()) - return sync_execute(final_query, {**params, 
**filter.hogql_context.values, "team_id": self.team.pk}) + return sync_execute( + final_query, + {**params, **filter.hogql_context.values, "team_id": self.team.pk}, + ) def test_prop_event_denormalized(self): _create_event( - event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": "some_other_val"} + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": "some_other_val"}, ) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": "some_val"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": "some_val"}, + ) materialize("events", "test_prop") materialize("events", "something_else") @@ -468,22 +836,54 @@ def test_prop_event_denormalized(self): filter = Filter(data={"properties": [{"key": "test_prop", "value": "_other_", "operator": "icontains"}]}) self.assertEqual(len(self._run_query(filter)), 1) - filter = Filter(data={"properties": [{"key": "test_prop", "value": "_other_", "operator": "not_icontains"}]}) + filter = Filter( + data={ + "properties": [ + { + "key": "test_prop", + "value": "_other_", + "operator": "not_icontains", + } + ] + } + ) self.assertEqual(len(self._run_query(filter)), 1) def test_prop_person_denormalized(self): - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"email": "test@posthog.com"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"email": "test@posthog.com"}, + ) _create_event(event="$pageview", team=self.team, distinct_id="some_id") materialize("person", "email") filter = Filter( - data={"properties": [{"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}]} + data={ + "properties": [ + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + } + ] + } ) self.assertEqual(len(self._run_query(filter, join_person_tables=True)), 1) filter = Filter( - 
data={"properties": [{"key": "email", "type": "person", "value": "posthog", "operator": "not_icontains"}]} + data={ + "properties": [ + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "not_icontains", + } + ] + } ) self.assertEqual(len(self._run_query(filter, join_person_tables=True)), 0) @@ -495,15 +895,35 @@ def test_prop_person_groups_denormalized(self): { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", "bar2"], "type": "event", "operator": None}, - {"key": "person_prop2", "value": "efg2", "type": "person", "operator": None}, + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "event", + "operator": None, + }, + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + "operator": None, + }, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event", "operator": None}, - {"key": "person_prop", "value": "efg", "type": "person", "operator": None}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + "operator": None, + }, + { + "key": "person_prop", + "value": "efg", + "type": "person", + "operator": None, + }, ], }, ], @@ -513,13 +933,31 @@ def test_prop_person_groups_denormalized(self): filter = Filter(data=_filter) _create_person(distinct_ids=["some_id_1"], team_id=self.team.pk, properties={}) - _create_event(event="$pageview", team=self.team, distinct_id="some_id_1", properties={"event_prop2": "foo2"}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id_1", + properties={"event_prop2": "foo2"}, + ) - _create_person(distinct_ids=["some_id_2"], team_id=self.team.pk, properties={"person_prop2": "efg2"}) + _create_person( + distinct_ids=["some_id_2"], + team_id=self.team.pk, + properties={"person_prop2": "efg2"}, + ) _create_event(event="$pageview", team=self.team, distinct_id="some_id_2") - _create_person(distinct_ids=["some_id_3"], team_id=self.team.pk, properties={"person_prop": "efg"}) - 
_create_event(event="$pageview", team=self.team, distinct_id="some_id_3", properties={"event_prop": "foo"}) + _create_person( + distinct_ids=["some_id_3"], + team_id=self.team.pk, + properties={"person_prop": "efg"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_id_3", + properties={"event_prop": "foo"}, + ) materialize("events", "event_prop") materialize("events", "event_prop2") @@ -528,9 +966,19 @@ def test_prop_person_groups_denormalized(self): self.assertEqual(len(self._run_query(filter, join_person_tables=True)), 3) def test_prop_event_denormalized_ints(self): - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 0}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 0}, + ) - _create_event(event="$pageview", team=self.team, distinct_id="whatever", properties={"test_prop": 2}) + _create_event( + event="$pageview", + team=self.team, + distinct_id="whatever", + properties={"test_prop": 2}, + ) materialize("events", "test_prop") materialize("events", "something_else") @@ -547,14 +995,26 @@ def test_prop_event_denormalized_ints(self): def test_get_property_string_expr(self): string_expr = get_property_string_expr("events", "some_non_mat_prop", "'some_non_mat_prop'", "properties") self.assertEqual( - string_expr, ("replaceRegexpAll(JSONExtractRaw(properties, 'some_non_mat_prop'), '^\"|\"$', '')", False) + string_expr, + ( + "replaceRegexpAll(JSONExtractRaw(properties, 'some_non_mat_prop'), '^\"|\"$', '')", + False, + ), ) string_expr = get_property_string_expr( - "events", "some_non_mat_prop", "'some_non_mat_prop'", "properties", table_alias="e" + "events", + "some_non_mat_prop", + "'some_non_mat_prop'", + "properties", + table_alias="e", ) self.assertEqual( - string_expr, ("replaceRegexpAll(JSONExtractRaw(e.properties, 'some_non_mat_prop'), '^\"|\"$', '')", False) + string_expr, + ( + 
"replaceRegexpAll(JSONExtractRaw(e.properties, 'some_non_mat_prop'), '^\"|\"$', '')", + False, + ), ) materialize("events", "some_mat_prop") @@ -569,7 +1029,11 @@ def test_get_property_string_expr(self): materialize("events", "some_mat_prop2", table_column="person_properties") materialize("events", "some_mat_prop3", table_column="group2_properties") string_expr = get_property_string_expr( - "events", "some_mat_prop2", "x", "properties", materialised_table_column="person_properties" + "events", + "some_mat_prop2", + "x", + "properties", + materialised_table_column="person_properties", ) self.assertEqual(string_expr, ('"mat_pp_some_mat_prop2"', True)) @@ -597,7 +1061,13 @@ def test_get_property_string_expr_groups(self): table_alias="e", materialised_table_column="group1_properties", ) - self.assertEqual(string_expr, ("replaceRegexpAll(JSONExtractRaw(e.gp_props_alias, 'x'), '^\"|\"$', '')", False)) + self.assertEqual( + string_expr, + ( + "replaceRegexpAll(JSONExtractRaw(e.gp_props_alias, 'x'), '^\"|\"$', '')", + False, + ), + ) @pytest.mark.django_db @@ -606,7 +1076,12 @@ def test_parse_prop_clauses_defaults(snapshot): data={ "properties": [ {"key": "event_prop", "value": "value"}, - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, ] } ) @@ -644,13 +1119,15 @@ def test_parse_prop_clauses_defaults(snapshot): @pytest.mark.django_db def test_parse_prop_clauses_precalculated_cohort(snapshot): - org = Organization.objects.create(name="other org") team = Team.objects.create(organization=org) cohort = Cohort.objects.create(team=team, groups=[{"event_id": "$pageview", "days": 7}], name="cohort") - filter = Filter(data={"properties": [{"key": "id", "value": cohort.pk, "type": "precalculated-cohort"}]}, team=team) + filter = Filter( + data={"properties": [{"key": "id", "value": cohort.pk, "type": "precalculated-cohort"}]}, + team=team, + ) assert ( 
parse_prop_grouped_clauses( @@ -669,7 +1146,16 @@ def test_parse_prop_clauses_precalculated_cohort(snapshot): @pytest.mark.django_db def test_parse_prop_clauses_funnel_step_element_prepend_regression(snapshot): filter = Filter( - data={"properties": [{"key": "text", "type": "element", "value": "Insights1", "operator": "exact"}]} + data={ + "properties": [ + { + "key": "text", + "type": "element", + "value": "Insights1", + "operator": "exact", + } + ] + } ) assert ( @@ -687,7 +1173,12 @@ def test_parse_prop_clauses_funnel_step_element_prepend_regression(snapshot): @pytest.mark.django_db def test_parse_groups_persons_edge_case_with_single_filter(snapshot): filter = Filter( - data={"properties": {"type": "OR", "values": [{"key": "email", "type": "person", "value": "1@posthog.com"}]}} + data={ + "properties": { + "type": "OR", + "values": [{"key": "email", "type": "person", "value": "1@posthog.com"}], + } + } ) assert ( parse_prop_grouped_clauses( @@ -785,14 +1276,22 @@ def test_breakdown_query_expression_materialised( materialize(table, breakdown[0], table_column="properties") actual = get_single_or_multi_property_string_expr( - breakdown, table, query_alias, column, materialised_table_column=materialise_column + breakdown, + table, + query_alias, + column, + materialised_table_column=materialise_column, ) assert actual == expected_with materialize(table, breakdown[0], table_column=materialise_column) # type: ignore actual = get_single_or_multi_property_string_expr( - breakdown, table, query_alias, column, materialised_table_column=materialise_column + breakdown, + table, + query_alias, + column, + materialised_table_column=materialise_column, ) assert actual == expected_without @@ -830,7 +1329,11 @@ def test_events(db, team) -> List[UUID]: group2_properties={"attr": "50"}, ), _create_event( - event="$pageview", team=team, distinct_id="whatever", properties={"attr": 5}, group2_properties={"attr": 5} + event="$pageview", + team=team, + distinct_id="whatever", + 
properties={"attr": 5}, + group2_properties={"attr": 5}, ), _create_event( event="$pageview", @@ -1023,9 +1526,18 @@ def clean_up_materialised_columns(): TEST_PROPERTIES = [ pytest.param(Property(key="email", value="test@posthog.com"), [0]), pytest.param(Property(key="email", value="test@posthog.com", operator="exact"), [0]), - pytest.param(Property(key="email", value=["pineapple@pizza.com", "mongo@example.com"], operator="exact"), [1]), pytest.param( - Property(key="attr", value="5"), [4], id="matching a number only matches event index 4 from test_events" + Property( + key="email", + value=["pineapple@pizza.com", "mongo@example.com"], + operator="exact", + ), + [1], + ), + pytest.param( + Property(key="attr", value="5"), + [4], + id="matching a number only matches event index 4 from test_events", ), pytest.param( Property(key="email", value="test@posthog.com", operator="is_not"), @@ -1033,7 +1545,11 @@ def clean_up_materialised_columns(): id="matching on email is not a value matches all but the first event from test_events", ), pytest.param( - Property(key="email", value=["test@posthog.com", "mongo@example.com"], operator="is_not"), + Property( + key="email", + value=["test@posthog.com", "mongo@example.com"], + operator="is_not", + ), range(2, 27), id="matching on email is not a value matches all but the first two events from test_events", ), @@ -1071,24 +1587,44 @@ def clean_up_materialised_columns(): [7, 8], id="match after date only value against date and time formatted property", ), - pytest.param(Property(key="long_date", operator="is_date_before", value="2021-04-01 18:30:00"), [7]), - pytest.param(Property(key="long_date", operator="is_date_after", value="2021-04-01 18:30:00"), [8]), + pytest.param( + Property(key="long_date", operator="is_date_before", value="2021-04-01 18:30:00"), + [7], + ), + pytest.param( + Property(key="long_date", operator="is_date_after", value="2021-04-01 18:30:00"), + [8], + ), pytest.param(Property(key="short_date", 
operator="is_date_before", value="2021-04-05"), [9]), pytest.param(Property(key="short_date", operator="is_date_after", value="2021-04-05"), [10]), - pytest.param(Property(key="short_date", operator="is_date_before", value="2021-04-07"), [9, 10]), - pytest.param(Property(key="short_date", operator="is_date_after", value="2021-04-03"), [9, 10]), + pytest.param( + Property(key="short_date", operator="is_date_before", value="2021-04-07"), + [9, 10], + ), + pytest.param( + Property(key="short_date", operator="is_date_after", value="2021-04-03"), + [9, 10], + ), pytest.param( Property(key="sdk_$time", operator="is_date_before", value="2021-12-25"), [11], id="matching a unix timestamp in seconds with fractional seconds after the decimal point", ), pytest.param( - Property(key="unix_timestamp_milliseconds", operator="is_date_after", value="2022-01-11"), + Property( + key="unix_timestamp_milliseconds", + operator="is_date_after", + value="2022-01-11", + ), [12], id="matching unix timestamp in milliseconds after a given date (which ClickHouse doesn't support)", ), pytest.param( - Property(key="unix_timestamp_milliseconds", operator="is_date_before", value="2022-01-13"), + Property( + key="unix_timestamp_milliseconds", + operator="is_date_before", + value="2022-01-13", + ), [12], id="matching unix timestamp in milliseconds before a given date (which ClickHouse doesn't support)", ), @@ -1118,52 +1654,88 @@ def clean_up_materialised_columns(): id="matching ISO 8601 format date after a given date", ), pytest.param( - Property(key="full_date_increasing_$time", operator="is_date_before", value="2021-04-01 20:00:00"), + Property( + key="full_date_increasing_$time", + operator="is_date_before", + value="2021-04-01 20:00:00", + ), [15], id="matching full format date with date parts n increasing order before a given date", ), pytest.param( - Property(key="full_date_increasing_$time", operator="is_date_after", value="2021-04-01 18:00:00"), + Property( + 
key="full_date_increasing_$time", + operator="is_date_after", + value="2021-04-01 18:00:00", + ), [15], id="matching full format date with date parts in increasing order after a given date", ), pytest.param( - Property(key="with_slashes_$time", operator="is_date_before", value="2021-04-01 20:00:00"), + Property( + key="with_slashes_$time", + operator="is_date_before", + value="2021-04-01 20:00:00", + ), [16], id="matching full format date with date parts separated by slashes before a given date", ), pytest.param( - Property(key="with_slashes_$time", operator="is_date_after", value="2021-04-01 18:00:00"), + Property( + key="with_slashes_$time", + operator="is_date_after", + value="2021-04-01 18:00:00", + ), [16], id="matching full format date with date parts separated by slashes after a given date", ), pytest.param( - Property(key="with_slashes_increasing_$time", operator="is_date_before", value="2021-04-01 20:00:00"), + Property( + key="with_slashes_increasing_$time", + operator="is_date_before", + value="2021-04-01 20:00:00", + ), [17], id="matching full format date with date parts increasing in size and separated by slashes before a given date", ), pytest.param( - Property(key="with_slashes_increasing_$time", operator="is_date_after", value="2021-04-01 18:00:00"), + Property( + key="with_slashes_increasing_$time", + operator="is_date_after", + value="2021-04-01 18:00:00", + ), [17], id="matching full format date with date parts increasing in size and separated by slashes after a given date", ), pytest.param( - Property(key="date_only", operator="is_date_exact", value="2021-04-01"), [20, 21], id="can match dates exactly" + Property(key="date_only", operator="is_date_exact", value="2021-04-01"), + [20, 21], + id="can match dates exactly", ), pytest.param( - Property(key="date_only_matched_against_date_and_time", operator="is_date_exact", value="2021-03-31"), + Property( + key="date_only_matched_against_date_and_time", + operator="is_date_exact", + 
value="2021-03-31", + ), [23, 24], id="can match dates exactly against datetimes and unix timestamps", ), pytest.param( Property( - key="date_exact_including_seconds_and_milliseconds", operator="is_date_exact", value="2021-03-31 18:12:12" + key="date_exact_including_seconds_and_milliseconds", + operator="is_date_exact", + value="2021-03-31 18:12:12", ), [25], id="can match date times exactly against datetimes with milliseconds", ), pytest.param( - Property(key="date_exact_including_seconds_and_milliseconds", operator="is_date_after", value="2021-03-31"), + Property( + key="date_exact_including_seconds_and_milliseconds", + operator="is_date_after", + value="2021-03-31", + ), [], id="can match date only filter after against datetime with milliseconds", ), @@ -1189,7 +1761,8 @@ def test_prop_filter_json_extract(test_events, clean_up_materialised_columns, pr [ str(uuid) for (uuid,) in sync_execute( - f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", {"team_id": team.pk, **params} + f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", + {"team_id": team.pk, **params}, ) ] ) @@ -1216,7 +1789,8 @@ def test_prop_filter_json_extract_materialized( [ str(uuid) for (uuid,) in sync_execute( - f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", {"team_id": team.pk, **params} + f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", + {"team_id": team.pk, **params}, ) ] ) @@ -1251,7 +1825,8 @@ def test_prop_filter_json_extract_person_on_events_materialized( [ str(uuid) for (uuid,) in sync_execute( - f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", {"team_id": team.pk, **params} + f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}", + {"team_id": team.pk, **params}, ) ] ) @@ -1276,15 +1851,35 @@ def test_combine_group_properties(): { "type": "OR", "values": [ - {"key": "a", "operator": "exact", "value": ["a", "b", "c"], "type": "event"}, - {"key": "b", "operator": "exact", "value": ["d", "e", "f"], "type": 
"event"}, + { + "key": "a", + "operator": "exact", + "value": ["a", "b", "c"], + "type": "event", + }, + { + "key": "b", + "operator": "exact", + "value": ["d", "e", "f"], + "type": "event", + }, ], }, { "type": "AND", "values": [ - {"key": "c", "operator": "exact", "value": ["g", "h", "i"], "type": "event"}, - {"key": "d", "operator": "exact", "value": ["j", "k", "l"], "type": "event"}, + { + "key": "c", + "operator": "exact", + "value": ["g", "h", "i"], + "type": "event", + }, + { + "key": "d", + "operator": "exact", + "value": ["j", "k", "l"], + "type": "event", + }, ], }, ], @@ -1297,15 +1892,35 @@ def test_combine_group_properties(): { "type": "OR", "values": [ - {"key": "a", "operator": "exact", "value": ["a", "b", "c"], "type": "event"}, - {"key": "b", "operator": "exact", "value": ["d", "e", "f"], "type": "event"}, + { + "key": "a", + "operator": "exact", + "value": ["a", "b", "c"], + "type": "event", + }, + { + "key": "b", + "operator": "exact", + "value": ["d", "e", "f"], + "type": "event", + }, ], }, { "type": "AND", "values": [ - {"key": "c", "operator": "exact", "value": ["g", "h", "i"], "type": "event"}, - {"key": "d", "operator": "exact", "value": ["j", "k", "l"], "type": "event"}, + { + "key": "c", + "operator": "exact", + "value": ["g", "h", "i"], + "type": "event", + }, + { + "key": "d", + "operator": "exact", + "value": ["j", "k", "l"], + "type": "event", + }, ], }, ], @@ -1315,8 +1930,18 @@ def test_combine_group_properties(): assert combined_group.to_dict() == { "type": "OR", "values": [ - {"key": "a", "operator": "exact", "value": ["a", "b", "c"], "type": "event"}, - {"key": "b", "operator": "exact", "value": ["d", "e", "f"], "type": "event"}, + { + "key": "a", + "operator": "exact", + "value": ["a", "b", "c"], + "type": "event", + }, + { + "key": "b", + "operator": "exact", + "value": ["d", "e", "f"], + "type": "event", + }, ], } @@ -1326,8 +1951,18 @@ def test_combine_group_properties(): assert combined_group.to_dict() == { "type": "AND", 
"values": [ - {"key": "c", "operator": "exact", "value": ["g", "h", "i"], "type": "event"}, - {"key": "d", "operator": "exact", "value": ["j", "k", "l"], "type": "event"}, + { + "key": "c", + "operator": "exact", + "value": ["g", "h", "i"], + "type": "event", + }, + { + "key": "d", + "operator": "exact", + "value": ["j", "k", "l"], + "type": "event", + }, ], } @@ -1335,25 +1970,79 @@ def test_combine_group_properties(): def test_session_property_validation(): # Property key not valid for type session with pytest.raises(ValidationError): - filter = Filter(data={"properties": [{"type": "session", "key": "some_prop", "value": 0, "operator": "gt"}]}) - parse_prop_grouped_clauses(team_id=1, property_group=filter.property_groups, hogql_context=filter.hogql_context) + filter = Filter( + data={ + "properties": [ + { + "type": "session", + "key": "some_prop", + "value": 0, + "operator": "gt", + } + ] + } + ) + parse_prop_grouped_clauses( + team_id=1, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, + ) # Operator not valid for $session_duration with pytest.raises(ValidationError): filter = Filter( - data={"properties": [{"type": "session", "key": "$session_duration", "value": 0, "operator": "is_set"}]} + data={ + "properties": [ + { + "type": "session", + "key": "$session_duration", + "value": 0, + "operator": "is_set", + } + ] + } + ) + parse_prop_grouped_clauses( + team_id=1, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) - parse_prop_grouped_clauses(team_id=1, property_group=filter.property_groups, hogql_context=filter.hogql_context) # Value not valid for $session_duration with pytest.raises(ValidationError): filter = Filter( - data={"properties": [{"type": "session", "key": "$session_duration", "value": "hey", "operator": "gt"}]} + data={ + "properties": [ + { + "type": "session", + "key": "$session_duration", + "value": "hey", + "operator": "gt", + } + ] + } + ) + parse_prop_grouped_clauses( + team_id=1, + 
property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) - parse_prop_grouped_clauses(team_id=1, property_group=filter.property_groups, hogql_context=filter.hogql_context) # Valid property values filter = Filter( - data={"properties": [{"type": "session", "key": "$session_duration", "value": "100", "operator": "gt"}]} + data={ + "properties": [ + { + "type": "session", + "key": "$session_duration", + "value": "100", + "operator": "gt", + } + ] + } + ) + parse_prop_grouped_clauses( + team_id=1, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) - parse_prop_grouped_clauses(team_id=1, property_group=filter.property_groups, hogql_context=filter.hogql_context) diff --git a/ee/clickhouse/queries/column_optimizer.py b/ee/clickhouse/queries/column_optimizer.py index 505729b1ea99a..330830f1c2288 100644 --- a/ee/clickhouse/queries/column_optimizer.py +++ b/ee/clickhouse/queries/column_optimizer.py @@ -9,7 +9,11 @@ from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.filters.utils import GroupTypeIndex from posthog.models.property import PropertyIdentifier -from posthog.models.property.util import box_value, count_hogql_properties, extract_tables_and_properties +from posthog.models.property.util import ( + box_value, + count_hogql_properties, + extract_tables_and_properties, +) from posthog.queries.column_optimizer.foss_column_optimizer import FOSSColumnOptimizer from posthog.queries.trends.util import is_series_group_based @@ -52,12 +56,22 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: boxed_breakdown = box_value(self.filter.breakdown) for b in boxed_breakdown: if isinstance(b, str): - counter[(b, self.filter.breakdown_type, self.filter.breakdown_group_type_index)] += 1 + counter[ + ( + b, + self.filter.breakdown_type, + self.filter.breakdown_group_type_index, + ) + ] += 1 elif self.filter.breakdown_type == "group": # :TRICKY: We only support string breakdown 
for group properties assert isinstance(self.filter.breakdown, str) counter[ - (self.filter.breakdown, self.filter.breakdown_type, self.filter.breakdown_group_type_index) + ( + self.filter.breakdown, + self.filter.breakdown_type, + self.filter.breakdown_group_type_index, + ) ] += 1 elif self.filter.breakdown_type == "hogql": if isinstance(self.filter.breakdown, list): @@ -72,7 +86,13 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: if breakdown["type"] == "hogql": counter = count_hogql_properties(breakdown["property"], counter) else: - counter[(breakdown["property"], breakdown["type"], self.filter.breakdown_group_type_index)] += 1 + counter[ + ( + breakdown["property"], + breakdown["type"], + self.filter.breakdown_group_type_index, + ) + ] += 1 # Both entities and funnel exclusions can contain nested property filters for entity in self.entities_used_in_filter(): @@ -104,7 +124,6 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: and self.filter.correlation_type == FunnelCorrelationType.PROPERTIES and self.filter.correlation_property_names ): - if self.filter.aggregation_group_type_index is not None: for prop_value in self.filter.correlation_property_names: counter[(prop_value, "group", self.filter.aggregation_group_type_index)] += 1 diff --git a/ee/clickhouse/queries/enterprise_cohort_query.py b/ee/clickhouse/queries/enterprise_cohort_query.py index c0d4f8d8b9462..a007b54903bdd 100644 --- a/ee/clickhouse/queries/enterprise_cohort_query.py +++ b/ee/clickhouse/queries/enterprise_cohort_query.py @@ -43,7 +43,6 @@ def check_negation_clause(prop: PropertyGroup) -> Tuple[bool, bool]: class EnterpriseCohortQuery(FOSSCohortQuery): def get_query(self) -> Tuple[str, Dict[str, Any]]: - if not self._outer_property_groups: # everything is pushed down, no behavioral stuff to do # thus, use personQuery directly @@ -56,11 +55,19 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: subq = [] if self.sequence_filters_to_query: - 
sequence_query, sequence_params, sequence_query_alias = self._get_sequence_query() + ( + sequence_query, + sequence_params, + sequence_query_alias, + ) = self._get_sequence_query() subq.append((sequence_query, sequence_query_alias)) self.params.update(sequence_params) else: - behavior_subquery, behavior_subquery_params, behavior_query_alias = self._get_behavior_subquery() + ( + behavior_subquery, + behavior_subquery_params, + behavior_query_alias, + ) = self._get_behavior_subquery() subq.append((behavior_subquery, behavior_query_alias)) self.params.update(behavior_subquery_params) @@ -81,7 +88,6 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: return final_query, self.params def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: - res: str = "" params: Dict[str, Any] = {} @@ -139,7 +145,11 @@ def get_stopped_performing_event(self, prop: Property, prepend: str, idx: int) - return ( f"{'NOT' if prop.negation else ''} {column_name}", - {f"{date_param}": date_value, f"{seq_date_param}": seq_date_value, **entity_params}, + { + f"{date_param}": date_value, + f"{seq_date_param}": seq_date_value, + **entity_params, + }, ) def get_restarted_performing_event(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: @@ -174,7 +184,11 @@ def get_restarted_performing_event(self, prop: Property, prepend: str, idx: int) return ( f"{'NOT' if prop.negation else ''} {column_name}", - {f"{date_param}": date_value, f"{seq_date_param}": seq_date_value, **entity_params}, + { + f"{date_param}": date_value, + f"{seq_date_param}": seq_date_value, + **entity_params, + }, ) def get_performed_event_first_time(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: @@ -193,7 +207,10 @@ def get_performed_event_first_time(self, prop: Property, prepend: str, idx: int) self._fields.append(field) - return (f"{'NOT' if prop.negation else ''} {column_name}", {f"{date_param}": date_value, 
**entity_params}) + return ( + f"{'NOT' if prop.negation else ''} {column_name}", + {f"{date_param}": date_value, **entity_params}, + ) def get_performed_event_regularly(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: event = (prop.event_type, prop.key) @@ -243,7 +260,10 @@ def get_performed_event_regularly(self, prop: Property, prepend: str, idx: int) self._fields.append(field) - return (f"{'NOT' if prop.negation else ''} {column_name}", {**entity_params, **params}) + return ( + f"{'NOT' if prop.negation else ''} {column_name}", + {**entity_params, **params}, + ) @cached_property def sequence_filters_to_query(self) -> List[Property]: @@ -264,7 +284,13 @@ def _get_sequence_query(self) -> Tuple[str, Dict[str, Any], str]: params = {} materialized_columns = list(self._column_optimizer.event_columns_to_query) - names = ["event", "properties", "distinct_id", "timestamp", *materialized_columns] + names = [ + "event", + "properties", + "distinct_id", + "timestamp", + *materialized_columns, + ] person_prop_query = "" person_prop_params: dict = {} @@ -277,7 +303,12 @@ def _get_sequence_query(self) -> Tuple[str, Dict[str, Any], str]: _intermediate_fields.extend(names) for idx, prop in enumerate(self.sequence_filters_to_query): - step_cols, intermediate_cols, aggregate_cols, seq_params = self._get_sequence_filter(prop, idx) + ( + step_cols, + intermediate_cols, + aggregate_cols, + seq_params, + ) = self._get_sequence_filter(prop, idx) _inner_fields.extend(step_cols) _intermediate_fields.extend(intermediate_cols) _outer_fields.extend(aggregate_cols) @@ -316,7 +347,12 @@ def _get_sequence_query(self) -> Tuple[str, Dict[str, Any], str]: """ return ( outer_query, - {"team_id": self._team_id, event_param_name: self._events, **params, **person_prop_params}, + { + "team_id": self._team_id, + event_param_name: self._events, + **params, + **person_prop_params, + }, self.FUNNEL_QUERY_ALIAS, ) @@ -359,10 +395,21 @@ def _get_sequence_filter(self, prop: 
Property, idx: int) -> Tuple[List[str], Lis ) step_cols.append(f"if({event_prepend}_step_1 = 1, timestamp, null) AS {event_prepend}_latest_1") - return step_cols, condition_cols, aggregate_cols, {**entity_params, **seq_entity_params} + return ( + step_cols, + condition_cols, + aggregate_cols, + { + **entity_params, + **seq_entity_params, + }, + ) def get_performed_event_sequence(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: - return f"{self.SEQUENCE_FIELD_ALIAS}_{self.sequence_filters_lookup[str(prop.to_dict())]}", {} + return ( + f"{self.SEQUENCE_FIELD_ALIAS}_{self.sequence_filters_lookup[str(prop.to_dict())]}", + {}, + ) # Check if negations are always paired with a positive filter # raise a value error warning that this is an invalid cohort diff --git a/ee/clickhouse/queries/event_query.py b/ee/clickhouse/queries/event_query.py index a4e21bab3b222..3e57be3e3892f 100644 --- a/ee/clickhouse/queries/event_query.py +++ b/ee/clickhouse/queries/event_query.py @@ -21,7 +21,12 @@ class EnterpriseEventQuery(EventQuery): def __init__( self, filter: Union[ - Filter, PathFilter, RetentionFilter, StickinessFilter, SessionRecordingsFilter, PropertiesTimelineFilter + Filter, + PathFilter, + RetentionFilter, + StickinessFilter, + SessionRecordingsFilter, + PropertiesTimelineFilter, ], team: Team, round_interval=False, @@ -55,5 +60,8 @@ def _get_groups_query(self) -> Tuple[str, Dict]: if isinstance(self._filter, PropertiesTimelineFilter): raise Exception("Properties Timeline never needs groups query") return GroupsJoinQuery( - self._filter, self._team_id, self._column_optimizer, person_on_events_mode=self._person_on_events_mode + self._filter, + self._team_id, + self._column_optimizer, + person_on_events_mode=self._person_on_events_mode, ).get_join_query() diff --git a/ee/clickhouse/queries/experiments/funnel_experiment_result.py b/ee/clickhouse/queries/experiments/funnel_experiment_result.py index b41ba454e2f0c..4044bcfd6ac7a 100644 --- 
a/ee/clickhouse/queries/experiments/funnel_experiment_result.py +++ b/ee/clickhouse/queries/experiments/funnel_experiment_result.py @@ -115,7 +115,11 @@ def get_variants(self, funnel_results): failure = total - success breakdown_value = result[0]["breakdown_value"][0] if breakdown_value == CONTROL_VARIANT_KEY: - control_variant = Variant(key=breakdown_value, success_count=int(success), failure_count=int(failure)) + control_variant = Variant( + key=breakdown_value, + success_count=int(success), + failure_count=int(failure), + ) else: test_variants.append(Variant(breakdown_value, int(success), int(failure))) @@ -123,7 +127,9 @@ def get_variants(self, funnel_results): @staticmethod def calculate_results( - control_variant: Variant, test_variants: List[Variant], priors: Tuple[int, int] = (1, 1) + control_variant: Variant, + test_variants: List[Variant], + priors: Tuple[int, int] = (1, 1), ) -> List[Probability]: """ Calculates probability that A is better than B. First variant is control, rest are test variants. 
@@ -144,16 +150,24 @@ def calculate_results( raise ValidationError("No control variant data found", code="no_data") if len(test_variants) >= 10: - raise ValidationError("Can't calculate A/B test results for more than 10 variants", code="too_much_data") + raise ValidationError( + "Can't calculate A/B test results for more than 10 variants", + code="too_much_data", + ) if len(test_variants) < 1: - raise ValidationError("Can't calculate A/B test results for less than 2 variants", code="no_data") + raise ValidationError( + "Can't calculate A/B test results for less than 2 variants", + code="no_data", + ) return calculate_probability_of_winning_for_each([control_variant, *test_variants]) @staticmethod def are_results_significant( - control_variant: Variant, test_variants: List[Variant], probabilities: List[Probability] + control_variant: Variant, + test_variants: List[Variant], + probabilities: List[Probability], ) -> Tuple[ExperimentSignificanceCode, Probability]: control_sample_size = control_variant.success_count + control_variant.failure_count @@ -174,7 +188,8 @@ def are_results_significant( return ExperimentSignificanceCode.LOW_WIN_PROBABILITY, 1 best_test_variant = max( - test_variants, key=lambda variant: variant.success_count / (variant.success_count + variant.failure_count) + test_variants, + key=lambda variant: variant.success_count / (variant.success_count + variant.failure_count), ) expected_loss = calculate_expected_loss(best_test_variant, [control_variant]) @@ -207,12 +222,16 @@ def calculate_expected_loss(target_variant: Variant, variants: List[Variant]) -> # Get `N=simulations` samples from a Beta distribution with alpha = prior_success + variant_sucess, # and beta = prior_failure + variant_failure samples = random_sampler.beta( - variant.success_count + prior_success, variant.failure_count + prior_failure, simulations_count + variant.success_count + prior_success, + variant.failure_count + prior_failure, + simulations_count, ) 
variant_samples.append(samples) target_variant_samples = random_sampler.beta( - target_variant.success_count + prior_success, target_variant.failure_count + prior_failure, simulations_count + target_variant.success_count + prior_success, + target_variant.failure_count + prior_failure, + simulations_count, ) loss = 0 @@ -234,12 +253,16 @@ def simulate_winning_variant_for_conversion(target_variant: Variant, variants: L # Get `N=simulations` samples from a Beta distribution with alpha = prior_success + variant_sucess, # and beta = prior_failure + variant_failure samples = random_sampler.beta( - variant.success_count + prior_success, variant.failure_count + prior_failure, simulations_count + variant.success_count + prior_success, + variant.failure_count + prior_failure, + simulations_count, ) variant_samples.append(samples) target_variant_samples = random_sampler.beta( - target_variant.success_count + prior_success, target_variant.failure_count + prior_failure, simulations_count + target_variant.success_count + prior_success, + target_variant.failure_count + prior_failure, + simulations_count, ) winnings = 0 @@ -256,7 +279,10 @@ def calculate_probability_of_winning_for_each(variants: List[Variant]) -> List[P Calculates the probability of winning for each variant. 
""" if len(variants) > 10: - raise ValidationError("Can't calculate A/B test results for more than 10 variants", code="too_much_data") + raise ValidationError( + "Can't calculate A/B test results for more than 10 variants", + code="too_much_data", + ) probabilities = [] # simulate winning for each test variant diff --git a/ee/clickhouse/queries/experiments/secondary_experiment_result.py b/ee/clickhouse/queries/experiments/secondary_experiment_result.py index 476e09dfd7c73..db4d03d8845fe 100644 --- a/ee/clickhouse/queries/experiments/secondary_experiment_result.py +++ b/ee/clickhouse/queries/experiments/secondary_experiment_result.py @@ -3,7 +3,9 @@ from zoneinfo import ZoneInfo from rest_framework.exceptions import ValidationError -from ee.clickhouse.queries.experiments.trend_experiment_result import uses_math_aggregation_by_user_or_property_value +from ee.clickhouse.queries.experiments.trend_experiment_result import ( + uses_math_aggregation_by_user_or_property_value, +) from posthog.constants import INSIGHT_FUNNELS, INSIGHT_TRENDS, TRENDS_CUMULATIVE from posthog.models.feature_flag import FeatureFlag diff --git a/ee/clickhouse/queries/experiments/test_experiment_result.py b/ee/clickhouse/queries/experiments/test_experiment_result.py index 3cfd44f5a2019..d7a45509d3700 100644 --- a/ee/clickhouse/queries/experiments/test_experiment_result.py +++ b/ee/clickhouse/queries/experiments/test_experiment_result.py @@ -10,8 +10,12 @@ Variant, calculate_expected_loss, ) -from ee.clickhouse.queries.experiments.trend_experiment_result import ClickhouseTrendExperimentResult -from ee.clickhouse.queries.experiments.trend_experiment_result import Variant as CountVariant +from ee.clickhouse.queries.experiments.trend_experiment_result import ( + ClickhouseTrendExperimentResult, +) +from ee.clickhouse.queries.experiments.trend_experiment_result import ( + Variant as CountVariant, +) from ee.clickhouse.queries.experiments.trend_experiment_result import calculate_p_value from 
posthog.constants import ExperimentSignificanceCode @@ -40,7 +44,12 @@ def calculate_probability_of_winning_for_target(target_variant: Variant, other_v elif len(variants) == 2: return probability_C_beats_A_and_B( - variants[0][0], variants[0][1], variants[1][0], variants[1][1], target[0], target[1] + variants[0][0], + variants[0][1], + variants[1][0], + variants[1][1], + target[0], + target[1], ) elif len(variants) == 3: @@ -72,9 +81,13 @@ def probability_B_beats_A(A_success: int, A_failure: int, B_success: int, B_fail def probability_C_beats_A_and_B( - A_success: int, A_failure: int, B_success: int, B_failure: int, C_success: int, C_failure: int + A_success: int, + A_failure: int, + B_success: int, + B_failure: int, + C_success: int, + C_failure: int, ): - total: Probability = 0 for i in range(A_success): for j in range(B_success): @@ -110,7 +123,10 @@ def probability_D_beats_A_B_and_C( for j in range(B_success): for k in range(C_success): total += exp( - logbeta(D_success + i + j + k, D_failure + A_failure + B_failure + C_failure) + logbeta( + D_success + i + j + k, + D_failure + A_failure + B_failure + C_failure, + ) - log(A_failure + i) - log(B_failure + j) - log(C_failure + k) @@ -135,7 +151,6 @@ def probability_D_beats_A_B_and_C( @flaky(max_runs=10, min_passes=1) class TestFunnelExperimentCalculator(unittest.TestCase): def test_calculate_results(self): - variant_test = Variant("A", 100, 10) variant_control = Variant("B", 100, 18) @@ -177,7 +192,9 @@ def test_calculate_results_for_two_test_variants(self): self.assertAlmostEqual(probabilities[0], alternative_probability_for_control, places=2) self.assertAlmostEqual( - calculate_expected_loss(variant_test_2, [variant_control, variant_test_1]), 0.0004, places=3 + calculate_expected_loss(variant_test_2, [variant_control, variant_test_1]), + 0.0004, + places=3, ) # this loss only checks variant 2 against control @@ -206,7 +223,9 @@ def test_calculate_results_for_two_test_variants_almost_equal(self): 
self.assertAlmostEqual(probabilities[0], alternative_probability_for_control, places=1) self.assertAlmostEqual( - calculate_expected_loss(variant_test_2, [variant_control, variant_test_1]), 0.022, places=2 + calculate_expected_loss(variant_test_2, [variant_control, variant_test_1]), + 0.022, + places=2, ) significant, loss = ClickhouseFunnelExperimentResult.are_results_significant( @@ -254,11 +273,15 @@ def test_calculate_results_for_three_test_variants(self): self.assertAlmostEqual(probabilities[0], alternative_probability_for_control, places=1) self.assertAlmostEqual( - calculate_expected_loss(variant_test_2, [variant_control, variant_test_1, variant_test_3]), 0.0004, places=2 + calculate_expected_loss(variant_test_2, [variant_control, variant_test_1, variant_test_3]), + 0.0004, + places=2, ) significant, loss = ClickhouseFunnelExperimentResult.are_results_significant( - variant_control, [variant_test_1, variant_test_2, variant_test_3], probabilities + variant_control, + [variant_test_1, variant_test_2, variant_test_3], + probabilities, ) self.assertAlmostEqual(loss, 0.0004, places=2) self.assertEqual(significant, ExperimentSignificanceCode.SIGNIFICANT) @@ -284,7 +307,9 @@ def test_calculate_results_for_three_test_variants_almost_equal(self): self.assertAlmostEqual(probabilities[0], alternative_probability_for_control, places=1) self.assertAlmostEqual( - calculate_expected_loss(variant_test_2, [variant_control, variant_test_1, variant_test_3]), 0.033, places=2 + calculate_expected_loss(variant_test_2, [variant_control, variant_test_1, variant_test_3]), + 0.033, + places=2, ) # passing in artificial probabilities to subvert the low_probability threshold @@ -311,7 +336,9 @@ def test_calculate_results_for_three_test_variants_much_better_than_control(self self.assertAlmostEqual(probabilities[0], alternative_probability_for_control, places=1) significant, loss = ClickhouseFunnelExperimentResult.are_results_significant( - variant_control, [variant_test_1, 
variant_test_2, variant_test_3], probabilities + variant_control, + [variant_test_1, variant_test_2, variant_test_3], + probabilities, ) self.assertAlmostEqual(loss, 0, places=2) self.assertEqual(significant, ExperimentSignificanceCode.SIGNIFICANT) @@ -398,7 +425,12 @@ def calculate_probability_of_winning_for_target_count_data( elif len(variants) == 2: return probability_C_beats_A_and_B_count_data( - variants[0][0], variants[0][1], variants[1][0], variants[1][1], target[0], target[1] + variants[0][0], + variants[0][1], + variants[1][0], + variants[1][1], + target[0], + target[1], ) else: return 0 @@ -419,7 +451,12 @@ def probability_B_beats_A_count_data(A_count: int, A_exposure: float, B_count: i def probability_C_beats_A_and_B_count_data( - A_count: int, A_exposure: float, B_count: int, B_exposure: float, C_count: int, C_exposure: float + A_count: int, + A_exposure: float, + B_count: int, + B_exposure: float, + C_count: int, + C_exposure: float, ) -> Probability: total: Probability = 0 diff --git a/ee/clickhouse/queries/experiments/test_utils.py b/ee/clickhouse/queries/experiments/test_utils.py index eac2913e994a7..ab99819cd1e43 100644 --- a/ee/clickhouse/queries/experiments/test_utils.py +++ b/ee/clickhouse/queries/experiments/test_utils.py @@ -35,10 +35,16 @@ def test_with_feature_flag_properties_on_events(self): team=self.team, events_by_person={ "person1": [ - {"event": "user signed up", "properties": {"$os": "Windows", "$feature/aloha": "control"}}, + { + "event": "user signed up", + "properties": {"$os": "Windows", "$feature/aloha": "control"}, + }, ], "person2": [ - {"event": "user signed up", "properties": {"$os": "Windows", "$feature/aloha": "test"}}, + { + "event": "user signed up", + "properties": {"$os": "Windows", "$feature/aloha": "test"}, + }, ], }, ) @@ -57,10 +63,22 @@ def test_with_no_feature_flag_properties_on_actions(self): ActionStep.objects.create( action=action_credit_card, event="paid", - properties=[{"key": "$os", "type": "event", "value": 
["Windows"], "operator": "exact"}], + properties=[ + { + "key": "$os", + "type": "event", + "value": ["Windows"], + "operator": "exact", + } + ], ) - ActionStep.objects.create(action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10") + ActionStep.objects.create( + action=action_credit_card, + event="$autocapture", + tag_name="button", + text="Pay $10", + ) filter = Filter( data={ @@ -95,7 +113,14 @@ def test_with_feature_flag_properties_on_actions(self): ActionStep.objects.create( action=action_credit_card, event="paid", - properties=[{"key": "$os", "type": "event", "value": ["Windows"], "operator": "exact"}], + properties=[ + { + "key": "$os", + "type": "event", + "value": ["Windows"], + "operator": "exact", + } + ], ) filter = Filter( @@ -116,7 +141,10 @@ def test_with_feature_flag_properties_on_actions(self): {"event": "paid", "properties": {"$os": "Windows"}}, ], "person2": [ - {"event": "paid", "properties": {"$os": "Windows", "$feature/aloha": "test"}}, + { + "event": "paid", + "properties": {"$os": "Windows", "$feature/aloha": "test"}, + }, ], "person3": [ {"event": "user signed up", "properties": {"$os": "Windows"}}, diff --git a/ee/clickhouse/queries/experiments/trend_experiment_result.py b/ee/clickhouse/queries/experiments/trend_experiment_result.py index 9252e6533ef7a..1cac73b3d6b85 100644 --- a/ee/clickhouse/queries/experiments/trend_experiment_result.py +++ b/ee/clickhouse/queries/experiments/trend_experiment_result.py @@ -101,7 +101,14 @@ def __init__( "explicit_date": True, "breakdown": breakdown_key, "breakdown_type": "event", - "properties": [{"key": breakdown_key, "value": variants, "operator": "exact", "type": "event"}], + "properties": [ + { + "key": breakdown_key, + "value": variants, + "operator": "exact", + "type": "event", + } + ], # :TRICKY: We don't use properties set on filters, instead using experiment variant options } ) @@ -148,7 +155,14 @@ def __init__( "explicit_date": True, "breakdown": breakdown_key, 
"breakdown_type": "event", - "properties": [{"key": breakdown_key, "value": variants, "operator": "exact", "type": "event"}], + "properties": [ + { + "key": breakdown_key, + "value": variants, + "operator": "exact", + "type": "event", + } + ], } ) else: @@ -171,8 +185,18 @@ def __init__( "breakdown_type": "event", "breakdown": "$feature_flag_response", "properties": [ - {"key": "$feature_flag_response", "value": variants, "operator": "exact", "type": "event"}, - {"key": "$feature_flag", "value": [feature_flag.key], "operator": "exact", "type": "event"}, + { + "key": "$feature_flag_response", + "value": variants, + "operator": "exact", + "type": "event", + }, + { + "key": "$feature_flag", + "value": [feature_flag.key], + "operator": "exact", + "type": "event", + }, ], } ) @@ -272,16 +296,24 @@ def calculate_results(control_variant: Variant, test_variants: List[Variant]) -> raise ValidationError("No control variant data found", code="no_data") if len(test_variants) >= 10: - raise ValidationError("Can't calculate A/B test results for more than 10 variants", code="too_much_data") + raise ValidationError( + "Can't calculate A/B test results for more than 10 variants", + code="too_much_data", + ) if len(test_variants) < 1: - raise ValidationError("Can't calculate A/B test results for less than 2 variants", code="no_data") + raise ValidationError( + "Can't calculate A/B test results for less than 2 variants", + code="no_data", + ) return calculate_probability_of_winning_for_each([control_variant, *test_variants]) @staticmethod def are_results_significant( - control_variant: Variant, test_variants: List[Variant], probabilities: List[Probability] + control_variant: Variant, + test_variants: List[Variant], + probabilities: List[Probability], ) -> Tuple[ExperimentSignificanceCode, Probability]: # TODO: Experiment with Expected Loss calculations for trend experiments @@ -339,7 +371,10 @@ def calculate_probability_of_winning_for_each(variants: List[Variant]) -> List[P """ if 
len(variants) > 10: - raise ValidationError("Can't calculate A/B test results for more than 10 variants", code="too_much_data") + raise ValidationError( + "Can't calculate A/B test results for more than 10 variants", + code="too_much_data", + ) probabilities = [] # simulate winning for each test variant @@ -393,5 +428,8 @@ def calculate_p_value(control_variant: Variant, test_variants: List[Variant]) -> best_test_variant = max(test_variants, key=lambda variant: variant.count) return poisson_p_value( - control_variant.count, control_variant.exposure, best_test_variant.count, best_test_variant.exposure + control_variant.count, + control_variant.exposure, + best_test_variant.count, + best_test_variant.exposure, ) diff --git a/ee/clickhouse/queries/experiments/utils.py b/ee/clickhouse/queries/experiments/utils.py index 092d779383c79..1a11df125c4ce 100644 --- a/ee/clickhouse/queries/experiments/utils.py +++ b/ee/clickhouse/queries/experiments/utils.py @@ -8,7 +8,6 @@ def requires_flag_warning(filter: Filter, team: Team) -> bool: - date_params = {} query_date_range = QueryDateRange(filter=filter, team=team, should_round=False) parsed_date_from, date_from_params = query_date_range.date_from diff --git a/ee/clickhouse/queries/funnels/funnel_correlation.py b/ee/clickhouse/queries/funnels/funnel_correlation.py index fe347862f9a73..00ebb685dc416 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation.py @@ -18,7 +18,11 @@ from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery from posthog.clickhouse.materialized_columns import get_materialized_columns -from posthog.constants import AUTOCAPTURE_EVENT, TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType +from posthog.constants import ( + AUTOCAPTURE_EVENT, + TREND_FILTER_TYPE_ACTIONS, + FunnelCorrelationType, +) from posthog.models.element.element import chain_to_elements from 
posthog.models.event.util import ElementSerializer from posthog.models.filters import Filter @@ -96,7 +100,6 @@ class EventContingencyTable: class FunnelCorrelation: - TOTAL_IDENTIFIER = "Total_Values_In_Query" ELEMENTS_DIVIDER = "__~~__" AUTOCAPTURE_EVENT_TYPE = "$event_type" @@ -168,7 +171,10 @@ def properties_to_include(self) -> List[str]: return [f"group{self._filter.aggregation_group_type_index}_properties"] possible_mat_col = mat_event_cols.get( - (property_name, f"group{self._filter.aggregation_group_type_index}_properties") + ( + property_name, + f"group{self._filter.aggregation_group_type_index}_properties", + ) ) if possible_mat_col is not None: props_to_include.append(possible_mat_col) @@ -210,7 +216,6 @@ def get_contingency_table_query(self) -> Tuple[str, Dict[str, Any]]: return self.get_event_query() def get_event_query(self) -> Tuple[str, Dict[str, Any]]: - funnel_persons_query, funnel_persons_params = self.get_funnel_actors_cte() event_join_query = self._get_events_join_query() @@ -274,7 +279,6 @@ def get_event_query(self) -> Tuple[str, Dict[str, Any]]: return query, params def get_event_property_query(self) -> Tuple[str, Dict[str, Any]]: - if not self._filter.correlation_event_names: raise ValidationError("Event Property Correlation expects atleast one event name to run correlation on") @@ -284,7 +288,10 @@ def get_event_property_query(self) -> Tuple[str, Dict[str, Any]]: if self.support_autocapture_elements(): event_type_expression, _ = get_property_string_expr( - "events", self.AUTOCAPTURE_EVENT_TYPE, f"'{self.AUTOCAPTURE_EVENT_TYPE}'", "properties" + "events", + self.AUTOCAPTURE_EVENT_TYPE, + f"'{self.AUTOCAPTURE_EVENT_TYPE}'", + "properties", ) array_join_query = f""" 'elements_chain' as prop_key, @@ -352,7 +359,6 @@ def get_event_property_query(self) -> Tuple[str, Dict[str, Any]]: return query, params def get_properties_query(self) -> Tuple[str, Dict[str, Any]]: - if not self._filter.correlation_property_names: raise ValidationError("Property 
Correlation expects atleast one Property to run correlation on") @@ -360,7 +366,10 @@ def get_properties_query(self) -> Tuple[str, Dict[str, Any]]: person_prop_query, person_prop_params = self._get_properties_prop_clause() - aggregation_join_query, aggregation_join_params = self._get_aggregation_join_query() + ( + aggregation_join_query, + aggregation_join_params, + ) = self._get_aggregation_join_query() query = f""" WITH @@ -426,7 +435,6 @@ def get_properties_query(self) -> Tuple[str, Dict[str, Any]]: return query, params def _get_aggregation_target_join_query(self) -> str: - if self._team.person_on_events_mode == PersonOnEventsMode.V1_ENABLED: aggregation_person_join = f""" JOIN funnel_actors as actors @@ -498,7 +506,9 @@ def _get_aggregation_join_query(self): return "", {} person_query, person_query_params = PersonQuery( - self._filter, self._team.pk, EnterpriseColumnOptimizer(self._filter, self._team.pk) + self._filter, + self._team.pk, + EnterpriseColumnOptimizer(self._filter, self._team.pk), ).get_query() return ( @@ -512,7 +522,6 @@ def _get_aggregation_join_query(self): return GroupsJoinQuery(self._filter, self._team.pk, join_key="funnel_actors.actor_id").get_join_query() def _get_properties_prop_clause(self): - if self._team.person_on_events_mode != PersonOnEventsMode.DISABLED and groups_on_events_querying_enabled(): group_properties_field = f"group{self._filter.aggregation_group_type_index}_properties" aggregation_properties_alias = ( @@ -640,7 +649,11 @@ def _run(self) -> Tuple[List[EventOddsRatio], bool]: """ self._filter.team = self._team - event_contingency_tables, success_total, failure_total = self.get_partial_event_contingency_tables() + ( + event_contingency_tables, + success_total, + failure_total, + ) = self.get_partial_event_contingency_tables() success_total = int(correct_result_for_sampling(success_total, self._filter.sampling_factor)) failure_total = int(correct_result_for_sampling(failure_total, self._filter.sampling_factor)) @@ -678,7 
+691,10 @@ def _run(self) -> Tuple[List[EventOddsRatio], bool]: return events, skewed_totals def construct_people_url( - self, success: bool, event_definition: EventDefinition, cache_invalidation_key: str + self, + success: bool, + event_definition: EventDefinition, + cache_invalidation_key: str, ) -> Optional[str]: """ Given an event_definition and success/failure flag, returns a url that @@ -688,23 +704,32 @@ def construct_people_url( """ if not self._filter.correlation_type or self._filter.correlation_type == FunnelCorrelationType.EVENTS: return self.construct_event_correlation_people_url( - success=success, event_definition=event_definition, cache_invalidation_key=cache_invalidation_key + success=success, + event_definition=event_definition, + cache_invalidation_key=cache_invalidation_key, ) elif self._filter.correlation_type == FunnelCorrelationType.EVENT_WITH_PROPERTIES: return self.construct_event_with_properties_people_url( - success=success, event_definition=event_definition, cache_invalidation_key=cache_invalidation_key + success=success, + event_definition=event_definition, + cache_invalidation_key=cache_invalidation_key, ) elif self._filter.correlation_type == FunnelCorrelationType.PROPERTIES: return self.construct_person_properties_people_url( - success=success, event_definition=event_definition, cache_invalidation_key=cache_invalidation_key + success=success, + event_definition=event_definition, + cache_invalidation_key=cache_invalidation_key, ) return None def construct_event_correlation_people_url( - self, success: bool, event_definition: EventDefinition, cache_invalidation_key: str + self, + success: bool, + event_definition: EventDefinition, + cache_invalidation_key: str, ) -> str: # NOTE: we need to convert certain params to strings. 
I don't think this # class should need to know these details, but shallow_clone is @@ -714,13 +739,19 @@ def construct_event_correlation_people_url( params = self._filter.shallow_clone( { "funnel_correlation_person_converted": "true" if success else "false", - "funnel_correlation_person_entity": {"id": event_definition["event"], "type": "events"}, + "funnel_correlation_person_entity": { + "id": event_definition["event"], + "type": "events", + }, } ).to_params() return f"{self._base_uri}api/person/funnel/correlation/?{urllib.parse.urlencode(params)}&cache_invalidation_key={cache_invalidation_key}" def construct_event_with_properties_people_url( - self, success: bool, event_definition: EventDefinition, cache_invalidation_key: str + self, + success: bool, + event_definition: EventDefinition, + cache_invalidation_key: str, ) -> str: if self.support_autocapture_elements(): # If we have an $autocapture event, we need to special case the @@ -741,7 +772,12 @@ def construct_event_with_properties_people_url( "id": event_name, "type": "events", "properties": [ - {"key": property_key, "value": [property_value], "type": "element", "operator": "exact"} + { + "key": property_key, + "value": [property_value], + "type": "element", + "operator": "exact", + } for property_key, property_value in elements_as_action.items() if property_value is not None ], @@ -758,7 +794,12 @@ def construct_event_with_properties_people_url( "id": event_name, "type": "events", "properties": [ - {"key": property_name, "value": property_value, "type": "event", "operator": "exact"} + { + "key": property_name, + "value": property_value, + "type": "event", + "operator": "exact", + } ], }, } @@ -766,7 +807,10 @@ def construct_event_with_properties_people_url( return f"{self._base_uri}api/person/funnel/correlation/?{urllib.parse.urlencode(params)}" def construct_person_properties_people_url( - self, success: bool, event_definition: EventDefinition, cache_invalidation_key: str + self, + success: bool, + 
event_definition: EventDefinition, + cache_invalidation_key: str, ) -> str: # NOTE: for property correlations, we just use the regular funnel # persons endpoint, with the breakdown value set, and we assume that @@ -861,7 +905,8 @@ def are_results_insignificant(event_contingency_table: EventContingencyTable) -> total_count = event_contingency_table.success_total + event_contingency_table.failure_total if event_contingency_table.visited.success_count + event_contingency_table.visited.failure_count < min( - FunnelCorrelation.MIN_PERSON_COUNT, FunnelCorrelation.MIN_PERSON_PERCENTAGE * total_count + FunnelCorrelation.MIN_PERSON_COUNT, + FunnelCorrelation.MIN_PERSON_PERCENTAGE * total_count, ): return True @@ -873,11 +918,15 @@ def serialize_event_odds_ratio(self, odds_ratio: EventOddsRatio) -> EventOddsRat return { "success_count": odds_ratio["success_count"], "success_people_url": self.construct_people_url( - success=True, event_definition=event_definition, cache_invalidation_key=cache_invalidation_key + success=True, + event_definition=event_definition, + cache_invalidation_key=cache_invalidation_key, ), "failure_count": odds_ratio["failure_count"], "failure_people_url": self.construct_people_url( - success=False, event_definition=event_definition, cache_invalidation_key=cache_invalidation_key + success=False, + event_definition=event_definition, + cache_invalidation_key=cache_invalidation_key, ), "odds_ratio": odds_ratio["odds_ratio"], "correlation_type": odds_ratio["correlation_type"], @@ -893,19 +942,20 @@ def serialize_event_with_property(self, event: str) -> EventDefinition: event_name, property_name, property_value = event.split("::") if event_name == AUTOCAPTURE_EVENT and property_name == "elements_chain": - event_type, elements_chain = property_value.split(self.ELEMENTS_DIVIDER) return EventDefinition( event=event, properties={self.AUTOCAPTURE_EVENT_TYPE: event_type}, - elements=cast(list, ElementSerializer(chain_to_elements(elements_chain), many=True).data), 
+ elements=cast( + list, + ElementSerializer(chain_to_elements(elements_chain), many=True).data, + ), ) return EventDefinition(event=event, properties={}, elements=[]) def get_entity_odds_ratio(event_contingency_table: EventContingencyTable, prior_counts: int) -> EventOddsRatio: - # Add 1 to all values to prevent divide by zero errors, and introduce a [prior](https://en.wikipedia.org/wiki/Prior_probability) odds_ratio = ( (event_contingency_table.visited.success_count + prior_counts) diff --git a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py index dc682d4445bc3..3b83344d502aa 100644 --- a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py +++ b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py @@ -4,14 +4,22 @@ from rest_framework.exceptions import ValidationError from ee.clickhouse.queries.funnels.funnel_correlation import FunnelCorrelation -from posthog.constants import FUNNEL_CORRELATION_PERSON_LIMIT, FunnelCorrelationType, PropertyOperatorType +from posthog.constants import ( + FUNNEL_CORRELATION_PERSON_LIMIT, + FunnelCorrelationType, + PropertyOperatorType, +) from posthog.models import Person from posthog.models.entity import Entity from posthog.models.filters.filter import Filter from posthog.models.filters.mixins.utils import cached_property from posthog.models.group import Group from posthog.models.team import Team -from posthog.queries.actor_base_query import ActorBaseQuery, SerializedGroup, SerializedPerson +from posthog.queries.actor_base_query import ( + ActorBaseQuery, + SerializedGroup, + SerializedPerson, +) from posthog.queries.funnels.funnel_event_query import FunnelEventQuery from posthog.queries.util import get_person_properties_mode @@ -44,7 +52,7 @@ def actor_query(self, limit_actors: Optional[bool] = True): def get_actors( self, - ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int]: + ) -> 
Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]: if self._filter.correlation_type == FunnelCorrelationType.PROPERTIES: return _FunnelPropertyCorrelationActors(self._filter, self._team, self._base_uri).get_actors() else: @@ -64,13 +72,15 @@ def aggregation_group_type_index(self): return self._filter.aggregation_group_type_index def actor_query(self, limit_actors: Optional[bool] = True): - if not self._filter.correlation_person_entity: raise ValidationError("No entity for persons specified") assert isinstance(self._filter.correlation_person_entity, Entity) - funnel_persons_query, funnel_persons_params = self._funnel_correlation.get_funnel_actors_cte() + ( + funnel_persons_query, + funnel_persons_params, + ) = self._funnel_correlation.get_funnel_actors_cte() prop_filters = self._filter.correlation_person_entity.property_groups @@ -150,11 +160,18 @@ def __init__(self, filter: Filter, team: Team, base_uri: str = "/") -> None: def aggregation_group_type_index(self): return self._filter.aggregation_group_type_index - def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None): + def actor_query( + self, + limit_actors: Optional[bool] = True, + extra_fields: Optional[List[str]] = None, + ): if not self._filter.correlation_property_values: raise ValidationError("Property Correlation expects atleast one Property to get persons for") - funnel_persons_query, funnel_persons_params = self._funnel_correlation.get_funnel_actors_cte() + ( + funnel_persons_query, + funnel_persons_params, + ) = self._funnel_correlation.get_funnel_actors_cte() conversion_filter = ( f'funnel_actors.steps {"=" if self._filter.correlation_persons_converted else "<>"} target_step' diff --git a/ee/clickhouse/queries/funnels/test/breakdown_cases.py b/ee/clickhouse/queries/funnels/test/breakdown_cases.py index b3a41cb1da192..f4fb2689d87b7 100644 --- a/ee/clickhouse/queries/funnels/test/breakdown_cases.py +++ 
b/ee/clickhouse/queries/funnels/test/breakdown_cases.py @@ -7,7 +7,10 @@ from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.instance_setting import override_instance_config from posthog.queries.funnels.funnel_unordered import ClickhouseFunnelUnordered -from posthog.queries.funnels.test.breakdown_cases import FunnelStepResult, assert_funnel_results_equal +from posthog.queries.funnels.test.breakdown_cases import ( + FunnelStepResult, + assert_funnel_results_equal, +) from posthog.test.base import ( APIBaseTest, also_test_with_materialized_columns, @@ -30,12 +33,23 @@ def _create_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) create_group( - team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"} + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, ) create_group( - team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=1, + group_key="org:5", + properties={"industry": "random"}, ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="org:5", properties={"industry": "random"}) def _assert_funnel_breakdown_result_is_correct(self, result, steps: List[FunnelStepResult]): def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: @@ -52,7 +66,10 @@ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: "breakdown": step.breakdown, "breakdown_value": step.breakdown, **( - {"action_id": None, "name": f"Completed {order+1} step{'s' if order > 0 else ''}"} + { + "action_id": None, + "name": f"Completed {order+1} step{'s' if order > 0 else ''}", + } if Funnel == ClickhouseFunnelUnordered else {} ), @@ -111,7 +128,11 @@ def 
test_funnel_breakdown_group(self): ) filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -146,8 +167,14 @@ def test_funnel_breakdown_group(self): ) # Querying persons when aggregating by persons should be ok, despite group breakdown - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "finance"), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "finance"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "finance"), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "finance"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], @@ -165,9 +192,13 @@ def test_funnel_breakdown_group(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, "technology"), [people["person2"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, "technology"), + [people["person2"].uuid, people["person3"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "technology"), + [people["person2"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "technology"), [people["person2"].uuid]) # TODO: Delete this test when moved to person-on-events @also_test_with_person_on_events_v2 @@ -217,7 +248,11 @@ def test_funnel_aggregate_by_groups_breakdown_group(self): ) filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -273,7 
+308,8 @@ def test_funnel_aggregate_by_groups_breakdown_group(self): ) @also_test_with_materialized_columns( - group_properties=[(0, "industry")], materialize_only_with_person_on_events=True + group_properties=[(0, "industry")], + materialize_only_with_person_on_events=True, ) @also_test_with_person_on_events_v2 @snapshot_clickhouse_queries @@ -323,7 +359,11 @@ def test_funnel_aggregate_by_groups_breakdown_group_person_on_events(self): ) filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", diff --git a/ee/clickhouse/queries/funnels/test/test_funnel.py b/ee/clickhouse/queries/funnels/test/test_funnel.py index bd8122b16f7a0..9e28240d28ad6 100644 --- a/ee/clickhouse/queries/funnels/test/test_funnel.py +++ b/ee/clickhouse/queries/funnels/test/test_funnel.py @@ -1,6 +1,8 @@ from datetime import datetime -from ee.clickhouse.queries.funnels.test.breakdown_cases import funnel_breakdown_group_test_factory +from ee.clickhouse.queries.funnels.test.breakdown_cases import ( + funnel_breakdown_group_test_factory, +) from posthog.constants import INSIGHT_FUNNELS from posthog.models.action import Action from posthog.models.action_step import ActionStep @@ -11,21 +13,55 @@ from posthog.queries.funnels.funnel import ClickhouseFunnel from posthog.queries.funnels.funnel_persons import ClickhouseFunnelActors from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors -from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors +from posthog.queries.funnels.funnel_unordered_persons import ( + ClickhouseFunnelUnorderedActors, +) from posthog.queries.funnels.test.test_funnel import _create_action -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, 
_create_event, _create_person +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) from posthog.test.test_journeys import journeys_for -class TestFunnelGroupBreakdown(ClickhouseTestMixin, funnel_breakdown_group_test_factory(ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_action, _create_person)): # type: ignore +class TestFunnelGroupBreakdown( + ClickhouseTestMixin, + funnel_breakdown_group_test_factory( + ClickhouseFunnel, + ClickhouseFunnelActors, + _create_event, + _create_action, + _create_person, + ), +): # type: ignore pass -class TestUnorderedFunnelGroupBreakdown(ClickhouseTestMixin, funnel_breakdown_group_test_factory(ClickhouseFunnel, ClickhouseFunnelUnorderedActors, _create_event, _create_action, _create_person)): # type: ignore +class TestUnorderedFunnelGroupBreakdown( + ClickhouseTestMixin, + funnel_breakdown_group_test_factory( + ClickhouseFunnel, + ClickhouseFunnelUnorderedActors, + _create_event, + _create_action, + _create_person, + ), +): # type: ignore pass -class TestStrictFunnelGroupBreakdown(ClickhouseTestMixin, funnel_breakdown_group_test_factory(ClickhouseFunnel, ClickhouseFunnelStrictActors, _create_event, _create_action, _create_person)): # type: ignore +class TestStrictFunnelGroupBreakdown( + ClickhouseTestMixin, + funnel_breakdown_group_test_factory( + ClickhouseFunnel, + ClickhouseFunnelStrictActors, + _create_event, + _create_action, + _create_person, + ), +): # type: ignore pass @@ -33,19 +69,50 @@ class TestClickhouseFunnel(ClickhouseTestMixin, APIBaseTest): maxDiff = None def test_funnel_aggregation_with_groups_with_cohort_filtering(self): - GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - 
create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, + ) - _create_person(distinct_ids=[f"user_1"], team=self.team, properties={"email": "fake@test.com"}) - _create_person(distinct_ids=[f"user_2"], team=self.team, properties={"email": "fake@test.com"}) - _create_person(distinct_ids=[f"user_3"], team=self.team, properties={"email": "fake_2@test.com"}) + _create_person( + distinct_ids=[f"user_1"], + team=self.team, + properties={"email": "fake@test.com"}, + ) + _create_person( + distinct_ids=[f"user_2"], + team=self.team, + properties={"email": "fake@test.com"}, + ) + _create_person( + distinct_ids=[f"user_3"], + team=self.team, + properties={"email": "fake_2@test.com"}, + ) action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create(event="$pageview", action=action1) @@ -53,14 +120,31 @@ def test_funnel_aggregation_with_groups_with_cohort_filtering(self): cohort = Cohort.objects.create( team=self.team, groups=[ - {"properties": [{"key": "email", "operator": "icontains", "value": "fake@test.com", "type": "person"}]} + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": "fake@test.com", + "type": "person", + } + ] + } ], ) events_by_person = { "user_1": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 
2, 14), "properties": {"$group_0": "org:5"}}, - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:5"}}, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + }, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + }, { "event": "user signed up", # same person, different group, so should count as different step 1 in funnel "timestamp": datetime(2020, 1, 10, 14), @@ -75,7 +159,11 @@ def test_funnel_aggregation_with_groups_with_cohort_filtering(self): } ], "user_3": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:7"}}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:7"}, + }, { # person not in cohort so should be filtered out "event": "paid", "timestamp": datetime(2020, 1, 3, 14), @@ -92,7 +180,13 @@ def test_funnel_aggregation_with_groups_with_cohort_filtering(self): "id": "user signed up", "type": "events", "order": 0, - "properties": [{"type": "precalculated-cohort", "key": "id", "value": cohort.pk}], + "properties": [ + { + "type": "precalculated-cohort", + "key": "id", + "value": cohort.pk, + } + ], }, {"id": "paid", "type": "events", "order": 1}, ], diff --git a/ee/clickhouse/queries/funnels/test/test_funnel_correlation.py b/ee/clickhouse/queries/funnels/test/test_funnel_correlation.py index 3355c5ad65627..12f4393bded9b 100644 --- a/ee/clickhouse/queries/funnels/test/test_funnel_correlation.py +++ b/ee/clickhouse/queries/funnels/test/test_funnel_correlation.py @@ -2,8 +2,14 @@ from rest_framework.exceptions import ValidationError -from ee.clickhouse.queries.funnels.funnel_correlation import EventContingencyTable, EventStats, FunnelCorrelation -from ee.clickhouse.queries.funnels.funnel_correlation_persons import FunnelCorrelationActors +from 
ee.clickhouse.queries.funnels.funnel_correlation import ( + EventContingencyTable, + EventStats, + FunnelCorrelation, +) +from ee.clickhouse.queries.funnels.funnel_correlation_persons import ( + FunnelCorrelationActors, +) from posthog.constants import INSIGHT_FUNNELS from posthog.models.action import Action from posthog.models.action_step import ActionStep @@ -35,13 +41,16 @@ def _create_action(**kwargs): class TestClickhouseFunnelCorrelation(ClickhouseTestMixin, APIBaseTest): - maxDiff = None def _get_actors_for_event(self, filter: Filter, event_name: str, properties=None, success=True): actor_filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": event_name, "type": "events", "properties": properties}, + "funnel_correlation_person_entity": { + "id": event_name, + "type": "events", + "properties": properties, + }, "funnel_correlation_person_converted": "TrUe" if success else "falSE", } ) @@ -53,7 +62,12 @@ def _get_actors_for_property(self, filter: Filter, property_values: list, succes actor_filter = filter.shallow_clone( { "funnel_correlation_property_values": [ - {"key": prop, "value": value, "type": type, "group_type_index": group_type_index} + { + "key": prop, + "value": value, + "type": type, + "group_type_index": group_type_index, + } for prop, value, type, group_type_index in property_values ], "funnel_correlation_person_converted": "TrUe" if success else "falSE", @@ -80,7 +94,10 @@ def test_basic_funnel_correlation_with_events(self): for i in range(10): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -89,12 +106,20 @@ def test_basic_funnel_correlation_with_events(self): distinct_id=f"user_{i}", timestamp="2020-01-03T14:00:00Z", ) - _create_event(team=self.team, 
event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) for i in range(10, 20): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -133,8 +158,14 @@ def test_basic_funnel_correlation_with_events(self): ) self.assertEqual(len(self._get_actors_for_event(filter, "positively_related")), 5) - self.assertEqual(len(self._get_actors_for_event(filter, "positively_related", success=False)), 0) - self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related", success=False)), 5) + self.assertEqual( + len(self._get_actors_for_event(filter, "positively_related", success=False)), + 0, + ) + self.assertEqual( + len(self._get_actors_for_event(filter, "negatively_related", success=False)), + 5, + ) self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related")), 0) # Now exclude positively_related @@ -162,31 +193,50 @@ def test_basic_funnel_correlation_with_events(self): ) # Getting specific people isn't affected by exclude_events self.assertEqual(len(self._get_actors_for_event(filter, "positively_related")), 5) - self.assertEqual(len(self._get_actors_for_event(filter, "positively_related", success=False)), 0) - self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related", success=False)), 5) + self.assertEqual( + len(self._get_actors_for_event(filter, "positively_related", success=False)), + 0, + ) + self.assertEqual( + len(self._get_actors_for_event(filter, "negatively_related", success=False)), + 5, + ) self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related")), 0) @snapshot_clickhouse_queries def 
test_action_events_are_excluded_from_correlations(self): - journey = {} for i in range(3): person_id = f"user_{i}" events = [ - {"event": "user signed up", "timestamp": "2020-01-02T14:00:00", "properties": {"key": "val"}}, + { + "event": "user signed up", + "timestamp": "2020-01-02T14:00:00", + "properties": {"key": "val"}, + }, # same event, but missing property, so not part of action. {"event": "user signed up", "timestamp": "2020-01-02T14:10:00"}, ] if i % 2 == 0: events.append({"event": "positively_related", "timestamp": "2020-01-03T14:00:00"}) - events.append({"event": "paid", "timestamp": "2020-01-04T14:00:00", "properties": {"key": "val"}}) + events.append( + { + "event": "paid", + "timestamp": "2020-01-04T14:00:00", + "properties": {"key": "val"}, + } + ) journey[person_id] = events # one failure needed journey["failure"] = [ - {"event": "user signed up", "timestamp": "2020-01-02T14:00:00", "properties": {"key": "val"}} + { + "event": "user signed up", + "timestamp": "2020-01-02T14:00:00", + "properties": {"key": "val"}, + } ] journeys_for(events_by_person=journey, team=self.team) # type: ignore @@ -204,7 +254,10 @@ def test_action_events_are_excluded_from_correlations(self): ) filters = { "events": [], - "actions": [{"id": sign_up_action.id, "order": 0}, {"id": paid_action.id, "order": 1}], + "actions": [ + {"id": sign_up_action.id, "order": 0}, + {"id": paid_action.id, "order": 1}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-14", @@ -233,11 +286,26 @@ def test_action_events_are_excluded_from_correlations(self): @snapshot_clickhouse_queries def test_funnel_correlation_with_events_and_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:7", properties={"industry": "finance"}) + 
create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:7", + properties={"industry": "finance"}, + ) for i in range(10, 20): - create_group(team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={}, + ) _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( team=self.team, @@ -352,13 +420,28 @@ def test_funnel_correlation_with_events_and_groups(self): ) self.assertEqual(len(self._get_actors_for_event(filter, "positively_related")), 5) - self.assertEqual(len(self._get_actors_for_event(filter, "positively_related", success=False)), 0) + self.assertEqual( + len(self._get_actors_for_event(filter, "positively_related", success=False)), + 0, + ) self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related")), 1) - self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related", success=False)), 1) + self.assertEqual( + len(self._get_actors_for_event(filter, "negatively_related", success=False)), + 1, + ) # Now exclude all groups in positive filter = filter.shallow_clone( - {"properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}]} + { + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ] + } ) result = FunnelCorrelation(filter, self.team)._run()[0] @@ -382,7 +465,10 @@ def test_funnel_correlation_with_events_and_groups(self): ) self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related")), 1) - self.assertEqual(len(self._get_actors_for_event(filter, "negatively_related", success=False)), 1) + self.assertEqual( + len(self._get_actors_for_event(filter, "negatively_related", success=False)), + 1, + ) 
@also_test_with_materialized_columns(event_properties=[], person_properties=["$browser"]) @snapshot_clickhouse_queries @@ -403,16 +489,35 @@ def test_basic_funnel_correlation_with_properties(self): correlation = FunnelCorrelation(filter, self.team) for i in range(10): - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") for i in range(10, 20): - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -423,17 +528,36 @@ def test_basic_funnel_correlation_with_properties(self): ) # One Positive with failure - _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk, properties={"$browser": "Positive"}) + _create_person( + distinct_ids=[f"user_fail"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_fail", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_fail", + timestamp="2020-01-02T14:00:00Z", ) # One 
Negative with success - _create_person(distinct_ids=[f"user_succ"], team_id=self.team.pk, properties={"$browser": "Negative"}) + _create_person( + distinct_ids=[f"user_succ"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_succ", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_succ", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_succ", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_succ", timestamp="2020-01-04T14:00:00Z") result = correlation._run()[0] @@ -478,13 +602,21 @@ def test_basic_funnel_correlation_with_properties(self): ], ) - self.assertEqual(len(self._get_actors_for_property(filter, [("$browser", "Positive", "person", None)])), 10) self.assertEqual( - len(self._get_actors_for_property(filter, [("$browser", "Positive", "person", None)], False)), 1 + len(self._get_actors_for_property(filter, [("$browser", "Positive", "person", None)])), + 10, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("$browser", "Positive", "person", None)], False)), + 1, ) - self.assertEqual(len(self._get_actors_for_property(filter, [("$browser", "Negative", "person", None)])), 1) self.assertEqual( - len(self._get_actors_for_property(filter, [("$browser", "Negative", "person", None)], False)), 10 + len(self._get_actors_for_property(filter, [("$browser", "Negative", "person", None)])), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("$browser", "Negative", "person", None)], False)), + 10, ) # TODO: Delete this test when moved to person-on-events @@ -497,9 +629,16 @@ def test_funnel_correlation_with_properties_and_groups(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": "positive"} + 
team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": "positive"}, + ) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, ) - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive"}) _create_event( team=self.team, event="user signed up", @@ -517,9 +656,16 @@ def test_funnel_correlation_with_properties_and_groups(self): for i in range(10, 20): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": "negative"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": "negative"}, + ) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, ) - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative"}) _create_event( team=self.team, event="user signed up", @@ -538,9 +684,16 @@ def test_funnel_correlation_with_properties_and_groups(self): # One Positive with failure create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:fail", properties={"industry": "positive"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:fail", + properties={"industry": "positive"}, + ) + _create_person( + distinct_ids=[f"user_fail"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, ) - _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk, properties={"$browser": "Positive"}) _create_event( team=self.team, event="user signed up", @@ -551,9 +704,16 @@ def test_funnel_correlation_with_properties_and_groups(self): # One Negative with success create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:succ", properties={"industry": "negative"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:succ", + properties={"industry": "negative"}, + ) + _create_person( + distinct_ids=[f"user_succ"], + 
team_id=self.team.pk, + properties={"$browser": "Negative"}, ) - _create_person(distinct_ids=[f"user_succ"], team_id=self.team.pk, properties={"$browser": "Negative"}) _create_event( team=self.team, event="user signed up", @@ -627,10 +787,22 @@ def test_funnel_correlation_with_properties_and_groups(self): ], ) - self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)])), 10) - self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)], False)), 1) - self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)])), 1) - self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)], False)), 10) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)])), + 10, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)], False)), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)])), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)], False)), + 10, + ) # test with `$all` as property # _run property correlation with filter on all properties @@ -659,9 +831,16 @@ def test_funnel_correlation_with_properties_and_groups_person_on_events(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": "positive"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": "positive"}, + ) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, ) - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive"}) _create_event( team=self.team, event="user signed up", @@ -679,9 +858,16 @@ def 
test_funnel_correlation_with_properties_and_groups_person_on_events(self): for i in range(10, 20): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": "negative"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": "negative"}, + ) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, ) - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative"}) _create_event( team=self.team, event="user signed up", @@ -700,9 +886,16 @@ def test_funnel_correlation_with_properties_and_groups_person_on_events(self): # One Positive with failure create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:fail", properties={"industry": "positive"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:fail", + properties={"industry": "positive"}, + ) + _create_person( + distinct_ids=[f"user_fail"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, ) - _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk, properties={"$browser": "Positive"}) _create_event( team=self.team, event="user signed up", @@ -713,9 +906,16 @@ def test_funnel_correlation_with_properties_and_groups_person_on_events(self): # One Negative with success create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:succ", properties={"industry": "negative"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:succ", + properties={"industry": "negative"}, + ) + _create_person( + distinct_ids=[f"user_succ"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, ) - _create_person(distinct_ids=[f"user_succ"], team_id=self.team.pk, properties={"$browser": "Negative"}) _create_event( team=self.team, event="user signed up", @@ -790,13 +990,21 @@ def test_funnel_correlation_with_properties_and_groups_person_on_events(self): ], ) - 
self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)])), 10) self.assertEqual( - len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)], False)), 1 + len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)])), + 10, ) - self.assertEqual(len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)])), 1) self.assertEqual( - len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)], False)), 10 + len(self._get_actors_for_property(filter, [("industry", "positive", "group", 0)], False)), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)])), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("industry", "negative", "group", 0)], False)), + 10, ) # test with `$all` as property @@ -828,18 +1036,42 @@ def test_no_divide_by_zero_errors(self): correlation = FunnelCorrelation(filter, self.team) for i in range(2): - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) # failure count for this event is 0 - _create_event(team=self.team, event="positive", distinct_id=f"user_{i}", timestamp="2020-01-03T14:00:00Z") - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="positive", + distinct_id=f"user_{i}", + timestamp="2020-01-03T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) for i in range(2, 4): - 
_create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: # success count for this event is 0 @@ -897,10 +1129,29 @@ def test_correlation_with_properties_raises_validation_error(self): filter = Filter(data=filters) correlation = FunnelCorrelation(filter, self.team) - _create_person(distinct_ids=[f"user_1"], team_id=self.team.pk, properties={"$browser": "Positive"}) - _create_event(team=self.team, event="user signed up", distinct_id=f"user_1", timestamp="2020-01-02T14:00:00Z") - _create_event(team=self.team, event="rick", distinct_id=f"user_1", timestamp="2020-01-03T14:00:00Z") - _create_event(team=self.team, event="paid", distinct_id=f"user_1", timestamp="2020-01-04T14:00:00Z") + _create_person( + distinct_ids=[f"user_1"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id=f"user_1", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="rick", + distinct_id=f"user_1", + timestamp="2020-01-03T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_1", + timestamp="2020-01-04T14:00:00Z", + ) flush_persons_and_events() with self.assertRaises(ValidationError): @@ -933,44 +1184,88 @@ def test_correlation_with_multiple_properties(self): #  5 successful people with both properties for i in range(5): _create_person( - distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive", "$nice": "very"} + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive", "$nice": "very"}, + ) + 
_create_event( + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") #  10 successful people with some different properties for i in range(5, 15): _create_person( - distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive", "$nice": "not"} + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive", "$nice": "not"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") # 5 Unsuccessful people with some common properties for i in range(15, 20): _create_person( - distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative", "$nice": "smh"} + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative", "$nice": "smh"}, ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) # One Positive with failure, no $nice property - _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk, properties={"$browser": "Positive"}) + _create_person( + distinct_ids=[f"user_fail"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) 
_create_event( - team=self.team, event="user signed up", distinct_id=f"user_fail", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_fail", + timestamp="2020-01-02T14:00:00Z", ) # One Negative with success, no $nice property - _create_person(distinct_ids=[f"user_succ"], team_id=self.team.pk, properties={"$browser": "Negative"}) + _create_person( + distinct_ids=[f"user_succ"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_succ", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_succ", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_succ", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_succ", timestamp="2020-01-04T14:00:00Z") result = correlation._run()[0] @@ -1073,9 +1368,18 @@ def test_correlation_with_multiple_properties(self): self.assertEqual(new_result, new_expected_result) - self.assertEqual(len(self._get_actors_for_property(filter, [("$nice", "not", "person", None)])), 10) - self.assertEqual(len(self._get_actors_for_property(filter, [("$nice", "", "person", None)], False)), 1) - self.assertEqual(len(self._get_actors_for_property(filter, [("$nice", "very", "person", None)])), 5) + self.assertEqual( + len(self._get_actors_for_property(filter, [("$nice", "not", "person", None)])), + 10, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("$nice", "", "person", None)], False)), + 1, + ) + self.assertEqual( + len(self._get_actors_for_property(filter, [("$nice", "very", "person", None)])), + 5, + ) def test_discarding_insignificant_events(self): filters = { @@ -1095,7 +1399,10 @@ def test_discarding_insignificant_events(self): for i in range(10): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - 
team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -1111,12 +1418,20 @@ def test_discarding_insignificant_events(self): distinct_id=f"user_{i}", timestamp="2020-01-03T14:20:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) for i in range(10, 20): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -1162,16 +1477,30 @@ def test_events_within_conversion_window_for_correlation(self): _create_person(distinct_ids=["user_successful"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_successful", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_successful", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="positively_related", + distinct_id="user_successful", + timestamp="2020-01-02T14:02:00Z", ) _create_event( - team=self.team, event="positively_related", distinct_id="user_successful", timestamp="2020-01-02T14:02:00Z" + team=self.team, + event="paid", + distinct_id="user_successful", + timestamp="2020-01-02T14:06:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_successful", timestamp="2020-01-02T14:06:00Z") _create_person(distinct_ids=["user_dropoff"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_dropoff", timestamp="2020-01-02T14:00:00Z" + 
team=self.team, + event="user signed up", + distinct_id="user_dropoff", + timestamp="2020-01-02T14:00:00Z", ) _create_event( team=self.team, @@ -1212,7 +1541,10 @@ def test_funnel_correlation_with_event_properties(self): "date_from": "2020-01-01", "date_to": "2020-01-14", "funnel_correlation_type": "event_with_properties", - "funnel_correlation_event_names": ["positively_related", "negatively_related"], + "funnel_correlation_event_names": [ + "positively_related", + "negatively_related", + ], } filter = Filter(data=filters) @@ -1221,7 +1553,10 @@ def test_funnel_correlation_with_event_properties(self): for i in range(10): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -1229,15 +1564,26 @@ def test_funnel_correlation_with_event_properties(self): event="positively_related", distinct_id=f"user_{i}", timestamp="2020-01-03T14:00:00Z", - properties={"signup_source": "facebook" if i % 4 == 0 else "email", "blah": "value_bleh"}, + properties={ + "signup_source": "facebook" if i % 4 == 0 else "email", + "blah": "value_bleh", + }, ) # source: email occurs only twice, so would be discarded from result set - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) for i in range(10, 20): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -1284,15 +1630,21 @@ def 
test_funnel_correlation_with_event_properties(self): ], ) - self.assertEqual(len(self._get_actors_for_event(filter, "positively_related", {"blah": "value_bleh"})), 5) self.assertEqual( - len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"})), 3 + len(self._get_actors_for_event(filter, "positively_related", {"blah": "value_bleh"})), + 5, + ) + self.assertEqual( + len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"})), + 3, ) self.assertEqual( - len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"}, False)), 0 + len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"}, False)), + 0, ) self.assertEqual( - len(self._get_actors_for_event(filter, "negatively_related", {"signup_source": "email"}, False)), 3 + len(self._get_actors_for_event(filter, "negatively_related", {"signup_source": "email"}, False)), + 3, ) @also_test_with_materialized_columns(["blah", "signup_source"], verify_no_jsonextract=False) @@ -1303,7 +1655,10 @@ def test_funnel_correlation_with_event_properties_and_groups(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=1, group_key=f"org:{i}", properties={"industry": "positive"} + team_id=self.team.pk, + group_type_index=1, + group_key=f"org:{i}", + properties={"industry": "positive"}, ) _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( @@ -1336,7 +1691,10 @@ def test_funnel_correlation_with_event_properties_and_groups(self): for i in range(10, 20): create_group( - team_id=self.team.pk, group_type_index=1, group_key=f"org:{i}", properties={"industry": "positive"} + team_id=self.team.pk, + group_type_index=1, + group_key=f"org:{i}", + properties={"industry": "positive"}, ) _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( @@ -1352,7 +1710,10 @@ def test_funnel_correlation_with_event_properties_and_groups(self): 
event="negatively_related", distinct_id=f"user_{i}", timestamp="2020-01-03T14:00:00Z", - properties={"signup_source": "shazam" if i % 6 == 0 else "email", "$group_1": f"org:{i}"}, + properties={ + "signup_source": "shazam" if i % 6 == 0 else "email", + "$group_1": f"org:{i}", + }, ) # source: shazam occurs only once, so would be discarded from result set @@ -1366,7 +1727,10 @@ def test_funnel_correlation_with_event_properties_and_groups(self): "date_to": "2020-01-14", "aggregation_group_type_index": 1, "funnel_correlation_type": "event_with_properties", - "funnel_correlation_event_names": ["positively_related", "negatively_related"], + "funnel_correlation_event_names": [ + "positively_related", + "negatively_related", + ], } filter = Filter(data=filters) @@ -1427,7 +1791,10 @@ def test_funnel_correlation_with_event_properties_exclusions(self): for i in range(3): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( team=self.team, @@ -1436,12 +1803,20 @@ def test_funnel_correlation_with_event_properties_exclusions(self): timestamp="2020-01-03T14:00:00Z", properties={"signup_source": "facebook", "blah": "value_bleh"}, ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) # Atleast one person that fails, to ensure we get results _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_fail", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_fail", + timestamp="2020-01-02T14:00:00Z", ) result = correlation._run()[0] @@ 
-1459,11 +1834,15 @@ def test_funnel_correlation_with_event_properties_exclusions(self): ], ) - self.assertEqual(len(self._get_actors_for_event(filter, "positively_related", {"blah": "value_bleh"})), 3) + self.assertEqual( + len(self._get_actors_for_event(filter, "positively_related", {"blah": "value_bleh"})), + 3, + ) # If you search for persons with a specific property, even if excluded earlier, you should get them self.assertEqual( - len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"})), 3 + len(self._get_actors_for_event(filter, "positively_related", {"signup_source": "facebook"})), + 3, ) @also_test_with_materialized_columns(["$event_type", "signup_source"]) @@ -1487,7 +1866,10 @@ def test_funnel_correlation_with_event_properties_autocapture(self): for i in range(6): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( team=self.team, @@ -1503,17 +1885,32 @@ def test_funnel_correlation_with_event_properties_autocapture(self): team=self.team, event="$autocapture", distinct_id=f"user_{i}", - elements=[Element(nth_of_type=1, nth_child=0, tag_name="button", text="Pay $10")], + elements=[ + Element( + nth_of_type=1, + nth_child=0, + tag_name="button", + text="Pay $10", + ) + ], timestamp="2020-01-03T14:00:00Z", properties={"signup_source": "facebook", "$event_type": "submit"}, ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) # Atleast one person that fails, to ensure we get results _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", 
distinct_id=f"user_fail", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_fail", + timestamp="2020-01-02T14:00:00Z", ) result = correlation._run()[0] @@ -1539,16 +1936,32 @@ def test_funnel_correlation_with_event_properties_autocapture(self): ], ) - self.assertEqual(len(self._get_actors_for_event(filter, "$autocapture", {"signup_source": "facebook"})), 3) - self.assertEqual(len(self._get_actors_for_event(filter, "$autocapture", {"$event_type": "click"})), 6) + self.assertEqual( + len(self._get_actors_for_event(filter, "$autocapture", {"signup_source": "facebook"})), + 3, + ) + self.assertEqual( + len(self._get_actors_for_event(filter, "$autocapture", {"$event_type": "click"})), + 6, + ) self.assertEqual( len( self._get_actors_for_event( filter, "$autocapture", [ - {"key": "tag_name", "operator": "exact", "type": "element", "value": "button"}, - {"key": "text", "operator": "exact", "type": "element", "value": "Pay $10"}, + { + "key": "tag_name", + "operator": "exact", + "type": "element", + "value": "button", + }, + { + "key": "text", + "operator": "exact", + "type": "element", + "value": "Pay $10", + }, ], ) ), @@ -1560,8 +1973,18 @@ def test_funnel_correlation_with_event_properties_autocapture(self): filter, "$autocapture", [ - {"key": "tag_name", "operator": "exact", "type": "element", "value": "a"}, - {"key": "href", "operator": "exact", "type": "element", "value": "/movie"}, + { + "key": "tag_name", + "operator": "exact", + "type": "element", + "value": "a", + }, + { + "key": "href", + "operator": "exact", + "type": "element", + "value": "/movie", + }, ], ) ), diff --git a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py index 93d1f00223fdf..4617ffde3c2d5 100644 --- a/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py +++ b/ee/clickhouse/queries/funnels/test/test_funnel_correlations_persons.py @@ 
-6,11 +6,15 @@ from django.utils import timezone from freezegun import freeze_time -from ee.clickhouse.queries.funnels.funnel_correlation_persons import FunnelCorrelationActors +from ee.clickhouse.queries.funnels.funnel_correlation_persons import ( + FunnelCorrelationActors, +) from posthog.constants import INSIGHT_FUNNELS from posthog.models import Cohort, Filter from posthog.models.person import Person -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.tasks.calculate_cohort import insert_cohort_from_insight_filter from posthog.test.base import ( APIBaseTest, @@ -28,7 +32,6 @@ class TestClickhouseFunnelCorrelationsActors(ClickhouseTestMixin, APIBaseTest): - maxDiff = None def _setup_basic_test(self): @@ -55,7 +58,10 @@ def _setup_basic_test(self): if i % 2 == 0: events_by_person[person_id].append( - {"event": "positively_related", "timestamp": datetime(2020, 1, 3, 14)} + { + "event": "positively_related", + "timestamp": datetime(2020, 1, 3, 14), + } ) success_target_persons.append(str(person.uuid)) @@ -68,7 +74,10 @@ def _setup_basic_test(self): events_by_person[person_id] = [{"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14)}] if i % 2 == 0: events_by_person[person_id].append( - {"event": "negatively_related", "timestamp": datetime(2020, 1, 3, 14)} + { + "event": "negatively_related", + "timestamp": datetime(2020, 1, 3, 14), + } ) failure_target_persons.append(str(person.uuid)) @@ -90,15 +99,30 @@ def _setup_basic_test(self): ] journeys_for(events_by_person, self.team, create_people=False) - return filter, success_target_persons, failure_target_persons, person_fail, person_succ + return ( + filter, + success_target_persons, + failure_target_persons, + person_fail, + person_succ, + ) def test_basic_funnel_correlation_with_events(self): - filter, success_target_persons, 
failure_target_persons, person_fail, person_succ = self._setup_basic_test() + ( + filter, + success_target_persons, + failure_target_persons, + person_fail, + person_succ, + ) = self._setup_basic_test() # test positively_related successes filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "positively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "positively_related", + "type": "events", + }, "funnel_correlation_person_converted": "TrUe", } ) @@ -109,7 +133,10 @@ def test_basic_funnel_correlation_with_events(self): # test negatively_related failures filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "negatively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "negatively_related", + "type": "events", + }, "funnel_correlation_person_converted": "falsE", } ) @@ -121,7 +148,10 @@ def test_basic_funnel_correlation_with_events(self): # test positively_related failures filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "positively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "positively_related", + "type": "events", + }, "funnel_correlation_person_converted": "False", } ) @@ -132,7 +162,10 @@ def test_basic_funnel_correlation_with_events(self): # test negatively_related successes filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "negatively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "negatively_related", + "type": "events", + }, "funnel_correlation_person_converted": "trUE", } ) @@ -143,32 +176,46 @@ def test_basic_funnel_correlation_with_events(self): # test all positively_related filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "positively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "positively_related", + "type": "events", + }, "funnel_correlation_person_converted": None, } ) _, 
serialized_actors, _ = FunnelCorrelationActors(filter, self.team).get_actors() self.assertCountEqual( - [str(val["id"]) for val in serialized_actors], [*success_target_persons, str(person_fail.uuid)] + [str(val["id"]) for val in serialized_actors], + [*success_target_persons, str(person_fail.uuid)], ) # test all negatively_related filter = filter.shallow_clone( { - "funnel_correlation_person_entity": {"id": "negatively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "negatively_related", + "type": "events", + }, "funnel_correlation_person_converted": None, } ) _, serialized_actors, _ = FunnelCorrelationActors(filter, self.team).get_actors() self.assertCountEqual( - [str(val["id"]) for val in serialized_actors], [*failure_target_persons, str(person_succ.uuid)] + [str(val["id"]) for val in serialized_actors], + [*failure_target_persons, str(person_succ.uuid)], ) @patch("posthog.tasks.calculate_cohort.insert_cohort_from_insight_filter.delay") def test_create_funnel_correlation_cohort(self, _insert_cohort_from_insight_filter): - filter, success_target_persons, failure_target_persons, person_fail, person_succ = self._setup_basic_test() + ( + filter, + success_target_persons, + failure_target_persons, + person_fail, + person_succ, + ) = self._setup_basic_test() params = { "events": [ @@ -179,7 +226,10 @@ def test_create_funnel_correlation_cohort(self, _insert_cohort_from_insight_filt "date_from": "2020-01-01", "date_to": "2020-01-14", "funnel_correlation_type": "events", - "funnel_correlation_person_entity": {"id": "positively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "positively_related", + "type": "events", + }, "funnel_correlation_person_converted": "TrUe", } @@ -212,14 +262,19 @@ def test_create_funnel_correlation_cohort(self, _insert_cohort_from_insight_filt self.assertEqual(cohort.count, 5) def test_people_arent_returned_multiple_times(self): - people = journeys_for( { "user_1": [ {"event": "user 
signed up", "timestamp": datetime(2020, 1, 2, 14)}, - {"event": "positively_related", "timestamp": datetime(2020, 1, 3, 14)}, + { + "event": "positively_related", + "timestamp": datetime(2020, 1, 3, 14), + }, # duplicate event - {"event": "positively_related", "timestamp": datetime(2020, 1, 3, 14)}, + { + "event": "positively_related", + "timestamp": datetime(2020, 1, 3, 14), + }, {"event": "paid", "timestamp": datetime(2020, 1, 4, 14)}, ] }, @@ -236,7 +291,10 @@ def test_people_arent_returned_multiple_times(self): "date_from": "2020-01-01", "date_to": "2020-01-14", "funnel_correlation_type": "events", - "funnel_correlation_person_entity": {"id": "positively_related", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "positively_related", + "type": "events", + }, "funnel_correlation_person_converted": "TrUe", } ) @@ -289,9 +347,15 @@ def test_funnel_correlation_on_event_with_recordings(self): "date_from": "2021-01-01", "date_to": "2021-01-08", "funnel_correlation_type": "events", - "events": [{"id": "$pageview", "order": 0}, {"id": "insight analyzed", "order": 1}], + "events": [ + {"id": "$pageview", "order": 0}, + {"id": "insight analyzed", "order": 1}, + ], "include_recordings": "true", - "funnel_correlation_person_entity": {"id": "insight loaded", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "insight loaded", + "type": "events", + }, "funnel_correlation_person_converted": "True", } ) @@ -327,7 +391,10 @@ def test_funnel_correlation_on_event_with_recordings(self): {"id": "insight updated", "order": 2}, ], "include_recordings": "true", - "funnel_correlation_person_entity": {"id": "insight loaded", "type": "events"}, + "funnel_correlation_person_entity": { + "id": "insight loaded", + "type": "events", + }, "funnel_correlation_person_converted": "False", } ) @@ -387,10 +454,18 @@ def test_funnel_correlation_on_properties_with_recordings(self): "date_from": "2021-01-01", "date_to": "2021-01-08", "funnel_correlation_type": 
"properties", - "events": [{"id": "$pageview", "order": 0}, {"id": "insight analyzed", "order": 1}], + "events": [ + {"id": "$pageview", "order": 0}, + {"id": "insight analyzed", "order": 1}, + ], "include_recordings": "true", "funnel_correlation_property_values": [ - {"key": "foo", "value": "bar", "operator": "exact", "type": "person"} + { + "key": "foo", + "value": "bar", + "operator": "exact", + "type": "person", + } ], "funnel_correlation_person_converted": "True", } @@ -417,7 +492,6 @@ def test_funnel_correlation_on_properties_with_recordings(self): @snapshot_clickhouse_queries @freeze_time("2021-01-02 00:00:00.000Z") def test_strict_funnel_correlation_with_recordings(self): - # First use that successfully completes the strict funnel p1 = _create_person(distinct_ids=["user_1"], team=self.team, properties={"foo": "bar"}) _create_event( @@ -496,10 +570,18 @@ def test_strict_funnel_correlation_with_recordings(self): "date_to": "2021-01-08", "funnel_order_type": "strict", "funnel_correlation_type": "properties", - "events": [{"id": "$pageview", "order": 0}, {"id": "insight analyzed", "order": 1}], + "events": [ + {"id": "$pageview", "order": 0}, + {"id": "insight analyzed", "order": 1}, + ], "include_recordings": "true", "funnel_correlation_property_values": [ - {"key": "foo", "value": "bar", "operator": "exact", "type": "person"} + { + "key": "foo", + "value": "bar", + "operator": "exact", + "type": "person", + } ], "funnel_correlation_person_converted": "True", } @@ -532,10 +614,18 @@ def test_strict_funnel_correlation_with_recordings(self): "date_to": "2021-01-08", "funnel_order_type": "strict", "funnel_correlation_type": "properties", - "events": [{"id": "$pageview", "order": 0}, {"id": "insight analyzed", "order": 1}], + "events": [ + {"id": "$pageview", "order": 0}, + {"id": "insight analyzed", "order": 1}, + ], "include_recordings": "true", "funnel_correlation_property_values": [ - {"key": "foo", "value": "bar", "operator": "exact", "type": "person"} + { + 
"key": "foo", + "value": "bar", + "operator": "exact", + "type": "person", + } ], "funnel_correlation_person_converted": "False", } diff --git a/ee/clickhouse/queries/paths/paths.py b/ee/clickhouse/queries/paths/paths.py index 053bf982c6082..a5b9968da589e 100644 --- a/ee/clickhouse/queries/paths/paths.py +++ b/ee/clickhouse/queries/paths/paths.py @@ -61,10 +61,18 @@ def get_target_point_filter(self) -> str: return "" def get_target_clause(self) -> Tuple[str, Dict]: - params: Dict[str, Union[str, None]] = {"target_point": None, "secondary_target_point": None} + params: Dict[str, Union[str, None]] = { + "target_point": None, + "secondary_target_point": None, + } if self._filter.end_point and self._filter.start_point: - params.update({"target_point": self._filter.end_point, "secondary_target_point": self._filter.start_point}) + params.update( + { + "target_point": self._filter.end_point, + "secondary_target_point": self._filter.start_point, + } + ) clause = f""" , indexOf(compact_path, %(secondary_target_point)s) as start_target_index @@ -101,7 +109,10 @@ def get_path_query_funnel_cte(self, funnel_filter: Filter): include_timestamp=bool(self._filter.funnel_paths), include_preceding_timestamp=self._filter.funnel_paths == FUNNEL_PATH_BETWEEN_STEPS, ) - funnel_persons_query, funnel_persons_param = funnel_persons_generator.actor_query(limit_actors=False) + ( + funnel_persons_query, + funnel_persons_param, + ) = funnel_persons_generator.actor_query(limit_actors=False) funnel_persons_query_new_params = funnel_persons_query.replace("%(", "%(funnel_") new_funnel_params = {"funnel_" + str(key): val for key, val in funnel_persons_param.items()} self.params.update(new_funnel_params) @@ -112,7 +123,6 @@ def get_path_query_funnel_cte(self, funnel_filter: Filter): """ def get_session_threshold_clause(self) -> str: - if self.should_query_funnel(): self._funnel_filter = cast(Filter, self._funnel_filter) # typing mess diff --git a/ee/clickhouse/queries/related_actors_query.py 
b/ee/clickhouse/queries/related_actors_query.py index bf0c8431c7ef2..9c031a3b66221 100644 --- a/ee/clickhouse/queries/related_actors_query.py +++ b/ee/clickhouse/queries/related_actors_query.py @@ -9,7 +9,13 @@ from posthog.models.filters.utils import validate_group_type_index from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.property import GroupTypeIndex -from posthog.queries.actor_base_query import SerializedActor, SerializedGroup, SerializedPerson, get_groups, get_people +from posthog.queries.actor_base_query import ( + SerializedActor, + SerializedGroup, + SerializedPerson, + get_groups, + get_people, +) from posthog.queries.person_distinct_id_query import get_team_distinct_ids_query @@ -22,7 +28,12 @@ class RelatedActorsQuery: Two actors are considered related if they have had shared events in the past 90 days. """ - def __init__(self, team: Team, group_type_index: Optional[Union[GroupTypeIndex, str]], id: str): + def __init__( + self, + team: Team, + group_type_index: Optional[Union[GroupTypeIndex, str]], + id: str, + ): self.team = team self.group_type_index = validate_group_type_index("group_type_index", group_type_index) self.id = id diff --git a/ee/clickhouse/queries/retention/retention.py b/ee/clickhouse/queries/retention/retention.py index 249ff103f4086..6d97b9bcae71b 100644 --- a/ee/clickhouse/queries/retention/retention.py +++ b/ee/clickhouse/queries/retention/retention.py @@ -1,5 +1,9 @@ -from ee.clickhouse.queries.retention.retention_actors import ClickhouseRetentionActorsByPeriod -from ee.clickhouse.queries.retention.retention_event_query import ClickhouseRetentionEventsQuery +from ee.clickhouse.queries.retention.retention_actors import ( + ClickhouseRetentionActorsByPeriod, +) +from ee.clickhouse.queries.retention.retention_event_query import ( + ClickhouseRetentionEventsQuery, +) from posthog.queries.retention.retention import Retention diff --git a/ee/clickhouse/queries/retention/retention_actors.py 
b/ee/clickhouse/queries/retention/retention_actors.py index 4215604be90b2..48c16c67973b6 100644 --- a/ee/clickhouse/queries/retention/retention_actors.py +++ b/ee/clickhouse/queries/retention/retention_actors.py @@ -1,4 +1,6 @@ -from ee.clickhouse.queries.retention.retention_event_query import ClickhouseRetentionEventsQuery +from ee.clickhouse.queries.retention.retention_event_query import ( + ClickhouseRetentionEventsQuery, +) from posthog.models.filters.mixins.utils import cached_property from posthog.queries.retention.actors_query import RetentionActorsByPeriod diff --git a/ee/clickhouse/queries/stickiness/stickiness.py b/ee/clickhouse/queries/stickiness/stickiness.py index f311808ef3c08..65f48c57e3ef7 100644 --- a/ee/clickhouse/queries/stickiness/stickiness.py +++ b/ee/clickhouse/queries/stickiness/stickiness.py @@ -1,5 +1,9 @@ -from ee.clickhouse.queries.stickiness.stickiness_actors import ClickhouseStickinessActors -from ee.clickhouse.queries.stickiness.stickiness_event_query import ClickhouseStickinessEventsQuery +from ee.clickhouse.queries.stickiness.stickiness_actors import ( + ClickhouseStickinessActors, +) +from ee.clickhouse.queries.stickiness.stickiness_event_query import ( + ClickhouseStickinessEventsQuery, +) from posthog.queries.stickiness.stickiness import Stickiness diff --git a/ee/clickhouse/queries/stickiness/stickiness_actors.py b/ee/clickhouse/queries/stickiness/stickiness_actors.py index f62b9f479b9f5..0405aa8674295 100644 --- a/ee/clickhouse/queries/stickiness/stickiness_actors.py +++ b/ee/clickhouse/queries/stickiness/stickiness_actors.py @@ -1,4 +1,6 @@ -from ee.clickhouse.queries.stickiness.stickiness_event_query import ClickhouseStickinessEventsQuery +from ee.clickhouse.queries.stickiness.stickiness_event_query import ( + ClickhouseStickinessEventsQuery, +) from posthog.models.filters.mixins.utils import cached_property from posthog.queries.stickiness.stickiness_actors import StickinessActors diff --git 
a/ee/clickhouse/queries/test/test_breakdown_props.py b/ee/clickhouse/queries/test/test_breakdown_props.py index e0442cffadcb1..b937c63fed66f 100644 --- a/ee/clickhouse/queries/test/test_breakdown_props.py +++ b/ee/clickhouse/queries/test/test_breakdown_props.py @@ -6,7 +6,10 @@ from posthog.models.filters import Filter from posthog.models.group.util import create_group from posthog.models.group_type_mapping import GroupTypeMapping -from posthog.queries.breakdown_props import _to_bucketing_expression, get_breakdown_prop_values +from posthog.queries.breakdown_props import ( + _to_bucketing_expression, + get_breakdown_prop_values, +) from posthog.queries.trends.util import process_math from posthog.test.base import ( APIBaseTest, @@ -20,7 +23,8 @@ class TestBreakdownProps(ClickhouseTestMixin, APIBaseTest): @also_test_with_materialized_columns( - event_properties=["$host", "distinct_id"], person_properties=["$browser", "email"] + event_properties=["$host", "distinct_id"], + person_properties=["$browser", "email"], ) @snapshot_clickhouse_queries def test_breakdown_person_props(self): @@ -34,14 +38,29 @@ def test_breakdown_person_props(self): ) self.team.test_account_filters = [ - {"key": "email", "type": "person", "value": "posthog.com", "operator": "not_icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog.com", + "operator": "not_icontains", + }, { "key": "$host", "type": "event", - "value": ["127.0.0.1:3000", "127.0.0.1:5000", "localhost:5000", "localhost:8000"], + "value": [ + "127.0.0.1:3000", + "127.0.0.1:5000", + "localhost:5000", + "localhost:8000", + ], "operator": "is_not", }, - {"key": "distinct_id", "type": "event", "value": "posthog.com", "operator": "not_icontains"}, + { + "key": "distinct_id", + "type": "event", + "value": "posthog.com", + "operator": "not_icontains", + }, ] self.team.save() with freeze_time("2020-01-04T13:01:01Z"): @@ -50,7 +69,14 @@ def test_breakdown_person_props(self): "insight": "FUNNELS", "properties": [], 
"filter_test_accounts": True, - "events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], + "events": [ + { + "id": "$pageview", + "name": "$pageview", + "type": "events", + "order": 0, + } + ], "actions": [], "funnel_viz_type": "steps", "display": "FunnelViz", @@ -63,7 +89,10 @@ def test_breakdown_person_props(self): } ) res = get_breakdown_prop_values( - filter, Entity({"id": "$pageview", "type": "events"}), "count(*)", self.team + filter, + Entity({"id": "$pageview", "type": "events"}), + "count(*)", + self.team, ) self.assertEqual(res, ["test"]) @@ -86,7 +115,9 @@ def test_breakdown_person_props_with_entity_filter(self): ) cohort = Cohort.objects.create( - team=self.team, name="a", groups=[{"properties": [{"key": "$browser", "value": "test", "type": "person"}]}] + team=self.team, + name="a", + groups=[{"properties": [{"key": "$browser", "value": "test", "type": "person"}]}], ) cohort.calculate_people_ch(pending_version=0) @@ -123,7 +154,11 @@ def test_breakdown_person_props_with_entity_filter(self): @snapshot_clickhouse_queries def test_breakdown_person_props_with_entity_filter_and_or_props_with_partial_pushdown(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$browser": "test", "$os": "test"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"$browser": "test", "$os": "test"}, + ) _create_event( team=self.team, event="$pageview", @@ -131,7 +166,11 @@ def test_breakdown_person_props_with_entity_filter_and_or_props_with_partial_pus timestamp="2020-01-02T12:00:00Z", properties={"key": "val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"$browser": "test2", "$os": "test2"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"$browser": "test2", "$os": "test2"}, + ) _create_event( team=self.team, event="$pageview", @@ -139,7 +178,11 @@ def test_breakdown_person_props_with_entity_filter_and_or_props_with_partial_pus 
timestamp="2020-01-02T12:00:00Z", properties={"key": "val2"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"$browser": "test3", "$os": "test3"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"$browser": "test3", "$os": "test3"}, + ) _create_event( team=self.team, event="$pageview", @@ -154,7 +197,14 @@ def test_breakdown_person_props_with_entity_filter_and_or_props_with_partial_pus "name": "$pageview", "type": "events", "order": 0, - "properties": [{"key": "$browser", "type": "person", "value": "test", "operator": "icontains"}], + "properties": [ + { + "key": "$browser", + "type": "person", + "value": "test", + "operator": "icontains", + } + ], } ] with self.settings(USE_PRECALCULATED_CH_COHORT_PEOPLE=True): @@ -165,8 +215,18 @@ def test_breakdown_person_props_with_entity_filter_and_or_props_with_partial_pus "properties": { "type": "OR", "values": [ - {"key": "$os", "type": "person", "value": "test2", "operator": "exact"}, - {"key": "key", "type": "event", "value": "val", "operator": "exact"}, + { + "key": "$os", + "type": "person", + "value": "test2", + "operator": "exact", + }, + { + "key": "key", + "type": "event", + "value": "val", + "operator": "exact", + }, ], }, "filter_test_accounts": False, @@ -190,17 +250,43 @@ def test_breakdown_group_props(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:7", properties={"industry": "finance"}) create_group( - team_id=self.team.pk, group_type_index=0, group_key="org:8", properties={"industry": "another", "out": 1} + 
team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:7", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:8", + properties={"industry": "another", "out": 1}, ) create_group( - team_id=self.team.pk, group_type_index=1, group_key="company:10", properties={"industry": "foobar"} + team_id=self.team.pk, + group_type_index=1, + group_key="company:10", + properties={"industry": "foobar"}, ) # :TRICKY: Test group type overlapping - create_group(team_id=self.team.pk, group_type_index=1, group_key="org:8", properties={"industry": "foobar"}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="org:8", + properties={"industry": "foobar"}, + ) for org_index in range(5, 9): _create_event( @@ -221,7 +307,13 @@ def test_breakdown_group_props(self): "breakdown_limit": 5, "events": [{"id": "$pageview", "type": "events", "order": 0}], "properties": [ - {"key": "out", "value": "", "type": "group", "group_type_index": 0, "operator": "is_not_set"} + { + "key": "out", + "value": "", + "type": "group", + "group_type_index": 0, + "operator": "is_not_set", + } ], }, team=self.team, @@ -241,7 +333,13 @@ def test_breakdown_group_props(self): "properties": { "type": "AND", "values": [ - {"key": "out", "value": "", "type": "group", "group_type_index": 0, "operator": "is_not_set"} + { + "key": "out", + "value": "", + "type": "group", + "group_type_index": 0, + "operator": "is_not_set", + } ], }, } @@ -251,7 +349,11 @@ def test_breakdown_group_props(self): @snapshot_clickhouse_queries def test_breakdown_session_props(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$browser": "test", "$os": "test"}) + _create_person( 
+ team_id=self.team.pk, + distinct_ids=["p1"], + properties={"$browser": "test", "$os": "test"}, + ) # 20 second session that starts before the time range _create_event( @@ -299,8 +401,16 @@ def test_breakdown_session_props(self): @snapshot_clickhouse_queries def test_breakdown_with_math_property_session(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"$browser": "test", "$os": "test"}) - _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"$browser": "mac", "$os": "test"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"$browser": "test", "$os": "test"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"$browser": "mac", "$os": "test"}, + ) # 20 second session that starts before the time range _create_event( @@ -412,10 +522,22 @@ def test_breakdown_with_math_property_session(self): [ (0, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0,1)(value)))"), (1, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0,1)(value)))"), - (2, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.50,1.00)(value)))"), - (3, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.33,0.67,1.00)(value)))"), - (5, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.20,0.40,0.60,0.80,1.00)(value)))"), - (7, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.14,0.29,0.43,0.57,0.71,0.86,1.00)(value)))"), + ( + 2, + "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.50,1.00)(value)))", + ), + ( + 3, + "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.33,0.67,1.00)(value)))", + ), + ( + 5, + "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.20,0.40,0.60,0.80,1.00)(value)))", + ), + ( + 7, + "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.14,0.29,0.43,0.57,0.71,0.86,1.00)(value)))", + ), ( 10, "arrayCompact(arrayMap(x -> floor(x, 2), quantiles(0.00,0.10,0.20,0.30,0.40,0.50,0.60,0.70,0.80,0.90,1.00)(value)))", @@ -423,7 
+545,6 @@ def test_breakdown_with_math_property_session(self): ], ) def test_bucketing_expression(test_input, expected): - result = _to_bucketing_expression(test_input) assert result == expected diff --git a/ee/clickhouse/queries/test/test_cohort_query.py b/ee/clickhouse/queries/test/test_cohort_query.py index 8db8b1daea391..6b9d4cf7ce116 100644 --- a/ee/clickhouse/queries/test/test_cohort_query.py +++ b/ee/clickhouse/queries/test/test_cohort_query.py @@ -20,7 +20,14 @@ ) -def _make_event_sequence(team, distinct_id, interval_days, period_event_counts, event="$pageview", properties={}): +def _make_event_sequence( + team, + distinct_id, + interval_days, + period_event_counts, + event="$pageview", + properties={}, +): for period_index, event_count in enumerate(period_event_counts): for i in range(event_count): _create_event( @@ -44,15 +51,19 @@ def _create_cohort(**kwargs): class TestCohortQuery(ClickhouseTestMixin, BaseTest): @snapshot_clickhouse_queries def test_basic_query(self): - action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( - event="$autocapture", action=action1, url="https://posthog.com/feedback/123", url_matching=ActionStep.EXACT + event="$autocapture", + action=action1, + url="https://posthog.com/feedback/123", + url_matching=ActionStep.EXACT, ) # satiesfies all conditions p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -71,7 +82,9 @@ def test_basic_query(self): # doesn't satisfy action _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -90,7 +103,9 @@ def test_basic_query(self): # doesn't 
satisfy property condition _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test", "email": "testXX@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test", "email": "testXX@posthog.com"}, ) _create_event( team=self.team, @@ -145,7 +160,11 @@ def test_basic_query(self): "value": "performed_event_first_time", "type": "behavioral", }, - {"key": "email", "value": "test@posthog.com", "type": "person"}, + { + "key": "email", + "value": "test@posthog.com", + "type": "person", + }, ], }, ], @@ -163,7 +182,9 @@ def test_basic_query(self): def test_performed_event(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -174,7 +195,9 @@ def test_performed_event(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -210,7 +233,9 @@ def test_performed_event(self): def test_performed_event_multiple(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -229,7 +254,9 @@ def test_performed_event_multiple(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -267,11 +294,15 @@ def test_performed_event_multiple(self): def test_performed_event_lte_1_times(self): 
_create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -282,7 +313,9 @@ def test_performed_event_lte_1_times(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test3", "email": "test3@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test3", "email": "test3@posthog.com"}, ) _create_event( team=self.team, @@ -327,7 +360,9 @@ def test_performed_event_lte_1_times(self): def test_can_handle_many_performed_multiple_filters(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -338,7 +373,9 @@ def test_can_handle_many_performed_multiple_filters(self): ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -349,7 +386,9 @@ def test_can_handle_many_performed_multiple_filters(self): ) p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test3", "email": "test3@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test3", "email": "test3@posthog.com"}, ) _create_event( team=self.team, @@ -427,7 +466,9 @@ def test_performed_event_zero_times_(self): def 
test_stopped_performing_event(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -438,7 +479,9 @@ def test_stopped_performing_event(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -500,13 +543,19 @@ def test_stopped_performing_event_raises_if_seq_date_later_than_date(self): def test_restarted_performing_event(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test2", "email": "test2@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test2", "email": "test2@posthog.com"}, ) _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test3", "email": "test3@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test3", "email": "test3@posthog.com"}, ) # P1 events (proper restarting sequence) @@ -609,10 +658,14 @@ def test_restarted_performing_event_raises_if_seq_date_later_than_date(self): def test_performed_event_first_time(self): _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test2", "email": "test2@posthog.com"} + 
team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test2", "email": "test2@posthog.com"}, ) _create_event( team=self.team, @@ -660,7 +713,9 @@ def test_performed_event_first_time(self): def test_performed_event_regularly(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 3, [1, 1, 1]) @@ -697,10 +752,14 @@ def test_performed_event_regularly(self): def test_performed_event_regularly_with_variable_event_counts_in_each_period(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test2", "email": "test2@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test2", "email": "test2@posthog.com"}, ) # p1 gets variable number of events in each period _make_event_sequence(self.team, "p1", 3, [0, 1, 2]) @@ -769,17 +828,25 @@ def test_performed_event_regularly_with_variable_event_counts_in_each_period(sel @snapshot_clickhouse_queries def test_person_props_only(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test1@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test1@posthog.com"}, ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test2@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test2@posthog.com"}, ) p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test3", "email": 
"test3@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test3", "email": "test3@posthog.com"}, ) # doesn't match _create_person( - team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "test3", "email": "test4@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p4"], + properties={"name": "test3", "email": "test4@posthog.com"}, ) filter = Filter( @@ -790,15 +857,27 @@ def test_person_props_only(self): { "type": "OR", "values": [ - {"key": "email", "value": "test1@posthog.com", "type": "person"}, - {"key": "email", "value": "test2@posthog.com", "type": "person"}, + { + "key": "email", + "value": "test1@posthog.com", + "type": "person", + }, + { + "key": "email", + "value": "test2@posthog.com", + "type": "person", + }, ], }, { "type": "AND", "values": [ {"key": "name", "value": "test3", "type": "person"}, - {"key": "email", "value": "test3@posthog.com", "type": "person"}, + { + "key": "email", + "value": "test3@posthog.com", + "type": "person", + }, ], }, ], @@ -816,15 +895,19 @@ def test_person_props_only(self): @snapshot_clickhouse_queries def test_person_properties_with_pushdowns(self): - action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( - event="$autocapture", action=action1, url="https://posthog.com/feedback/123", url_matching=ActionStep.EXACT + event="$autocapture", + action=action1, + url="https://posthog.com/feedback/123", + url_matching=ActionStep.EXACT, ) # satiesfies all conditions p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -843,7 +926,9 @@ def test_person_properties_with_pushdowns(self): # doesn't satisfy action _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + 
team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -862,7 +947,9 @@ def test_person_properties_with_pushdowns(self): # satisfies special condition (not pushed down person property in OR group) p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "special", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "special", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -897,7 +984,11 @@ def test_person_properties_with_pushdowns(self): "value": "performed_event", "type": "behavioral", }, - {"key": "name", "value": "special", "type": "person"}, # this is NOT pushed down + { + "key": "name", + "value": "special", + "type": "person", + }, # this is NOT pushed down ], }, { @@ -911,7 +1002,11 @@ def test_person_properties_with_pushdowns(self): "value": "performed_event_first_time", "type": "behavioral", }, - {"key": "email", "value": "test@posthog.com", "type": "person"}, # this is pushed down + { + "key": "email", + "value": "test@posthog.com", + "type": "person", + }, # this is pushed down ], }, ], @@ -927,10 +1022,11 @@ def test_person_properties_with_pushdowns(self): @also_test_with_materialized_columns(person_properties=["$sample_field"]) @snapshot_clickhouse_queries def test_person(self): - # satiesfies all conditions p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "$sample_field": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "$sample_field": "test@posthog.com"}, ) filter = Filter( data={ @@ -945,7 +1041,11 @@ def test_person(self): "value": "performed_event", "type": "behavioral", }, - {"key": "$sample_field", "value": "test@posthog.com", "type": "person"}, + { + "key": "$sample_field", + "value": "test@posthog.com", + "type": "person", + }, ], } } @@ -1057,7 +1157,9 @@ def 
test_earliest_date_clause_removed_for_started_at_query(self): def test_negation(self): _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1068,7 +1170,9 @@ def test_negation(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1102,7 +1206,9 @@ def test_negation(self): def test_negation_with_simplify_filters(self): _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1113,7 +1219,9 @@ def test_negation_with_simplify_filters(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1124,7 +1232,9 @@ def test_negation_with_simplify_filters(self): ) p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1171,7 +1281,9 @@ def test_negation_with_simplify_filters(self): def test_negation_dynamic_time_bound_with_performed_event(self): # invalid dude because $pageview happened too early _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + 
distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1191,7 +1303,9 @@ def test_negation_dynamic_time_bound_with_performed_event(self): # invalid dude because no new_view event _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1203,7 +1317,9 @@ def test_negation_dynamic_time_bound_with_performed_event(self): # valid dude because $pageview happened a long time ago p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1223,7 +1339,9 @@ def test_negation_dynamic_time_bound_with_performed_event(self): # valid dude because $pageview did not happen p4 = _create_person( - team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p4"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1269,7 +1387,9 @@ def test_negation_dynamic_time_bound_with_performed_event(self): def test_negation_dynamic_time_bound_with_performed_event_sequence(self): # invalid dude because $pageview sequence happened too early _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) # pageview sequence that happens today, and 2 days ago _make_event_sequence(self.team, "p1", 2, [1, 1]) @@ -1283,13 +1403,17 @@ def test_negation_dynamic_time_bound_with_performed_event_sequence(self): # invalid dude because no new_view event 
_create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p2", 2, [1, 1]) # valid dude because $pageview sequence happened a long time ago p3 = _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1315,7 +1439,9 @@ def test_negation_dynamic_time_bound_with_performed_event_sequence(self): # valid dude because $pageview sequence did not happen p4 = _create_person( - team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p4"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1327,7 +1453,9 @@ def test_negation_dynamic_time_bound_with_performed_event_sequence(self): # valid dude because $pageview sequence did not complete, even if one pageview happened p5 = _create_person( - team_id=self.team.pk, distinct_ids=["p5"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p5"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1346,7 +1474,9 @@ def test_negation_dynamic_time_bound_with_performed_event_sequence(self): # valid dude because $pageview sequence delay was long enough, even if it happened too early p6 = _create_person( - team_id=self.team.pk, distinct_ids=["p6"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p6"], + properties={"name": "test", "email": "test@posthog.com"}, ) # pageview sequence that happens today, and 4 days ago _make_event_sequence(self.team, "p6", 4, [1, 1]) @@ 
-1395,7 +1525,11 @@ def test_negation_dynamic_time_bound_with_performed_event_sequence(self): self.assertCountEqual([p3.uuid, p4.uuid, p5.uuid, p6.uuid], [r[0] for r in res]) def test_cohort_filter(self): - p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + p1 = _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort( team=self.team, name="cohort1", @@ -1404,7 +1538,12 @@ def test_cohort_filter(self): flush_persons_and_events() filter = Filter( - data={"properties": {"type": "AND", "values": [{"key": "id", "value": cohort.pk, "type": "cohort"}]}} + data={ + "properties": { + "type": "AND", + "values": [{"key": "id", "value": cohort.pk, "type": "cohort"}], + } + } ) q, params = CohortQuery(filter=filter, team=self.team).get_query() @@ -1417,7 +1556,16 @@ def test_faulty_type(self): team=self.team, name="cohort1", groups=[ - {"properties": [{"key": "email", "type": "event", "value": ["fake@test.com"], "operator": "exact"}]} + { + "properties": [ + { + "key": "email", + "type": "event", + "value": ["fake@test.com"], + "operator": "exact", + } + ] + } ], ) @@ -1428,7 +1576,14 @@ def test_faulty_type(self): "values": [ { "type": "AND", - "values": [{"key": "email", "value": ["fake@test.com"], "operator": "exact", "type": "person"}], + "values": [ + { + "key": "email", + "value": ["fake@test.com"], + "operator": "exact", + "type": "person", + } + ], } ], }, @@ -1438,7 +1593,17 @@ def test_missing_type(self): cohort = _create_cohort( team=self.team, name="cohort1", - groups=[{"properties": [{"key": "email", "value": ["fake@test.com"], "operator": "exact"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "value": ["fake@test.com"], + "operator": "exact", + } + ] + } + ], ) self.assertEqual( @@ -1448,7 +1613,14 @@ def test_missing_type(self): "values": [ { "type": "AND", - "values": [{"key": "email", "value": 
["fake@test.com"], "operator": "exact", "type": "person"}], + "values": [ + { + "key": "email", + "value": ["fake@test.com"], + "operator": "exact", + "type": "person", + } + ], } ], }, @@ -1459,7 +1631,15 @@ def test_old_old_style_properties(self): team=self.team, name="cohort1", groups=[ - {"properties": [{"key": "email", "value": ["fake@test.com"], "operator": "exact"}]}, + { + "properties": [ + { + "key": "email", + "value": ["fake@test.com"], + "operator": "exact", + } + ] + }, {"properties": {"abra": "cadabra", "name": "alakazam"}}, ], ) @@ -1471,7 +1651,14 @@ def test_old_old_style_properties(self): "values": [ { "type": "AND", - "values": [{"key": "email", "value": ["fake@test.com"], "operator": "exact", "type": "person"}], + "values": [ + { + "key": "email", + "value": ["fake@test.com"], + "operator": "exact", + "type": "person", + } + ], }, { "type": "AND", @@ -1485,7 +1672,11 @@ def test_old_old_style_properties(self): ) def test_precalculated_cohort_filter(self): - p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + p1 = _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort( team=self.team, name="cohort1", @@ -1497,7 +1688,13 @@ def test_precalculated_cohort_filter(self): data={ "properties": { "type": "OR", - "values": [{"key": "id", "value": cohort.pk, "type": "precalculated-cohort"}], + "values": [ + { + "key": "id", + "value": cohort.pk, + "type": "precalculated-cohort", + } + ], } } ) @@ -1531,7 +1728,11 @@ def test_precalculated_cohort_filter_with_extra_filters(self): "properties": { "type": "OR", "values": [ - {"key": "id", "value": cohort.pk, "type": "precalculated-cohort"}, + { + "key": "id", + "value": cohort.pk, + "type": "precalculated-cohort", + }, {"key": "name", "value": "test2", "type": "person"}, ], } @@ -1550,7 +1751,11 @@ def test_precalculated_cohort_filter_with_extra_filters(self): 
@snapshot_clickhouse_queries def test_cohort_filter_with_extra(self): - p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + p1 = _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort( team=self.team, name="cohort1", @@ -1558,7 +1763,9 @@ def test_cohort_filter_with_extra(self): ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1622,24 +1829,34 @@ def test_cohort_filter_with_extra(self): def test_cohort_filter_with_another_cohort_with_event_sequence(self): # passes filters for cohortCeption, but not main cohort _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@gmail.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@gmail.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) # passes filters for cohortCeption and main cohort p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p2", 2, [1, 1]) _make_event_sequence(self.team, "p2", 6, [1, 1], event="$new_view") # passes filters for neither cohortCeption nor main cohort - _create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"email": "test@posthog.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"email": "test@posthog.com"}, + ) _make_event_sequence(self.team, "p3", 2, [1, 1]) # passes filters for mainCohort but not cohortCeption _create_person( - team_id=self.team.pk, distinct_ids=["p4"], 
properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p4"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p4", 6, [1, 1]) _make_event_sequence(self.team, "p4", 6, [1, 1], event="$new_view") @@ -1700,14 +1917,22 @@ def test_cohort_filter_with_another_cohort_with_event_sequence(self): @snapshot_clickhouse_queries def test_static_cohort_filter(self): - - p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + p1 = _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort(team=self.team, name="cohort1", groups=[], is_static=True) flush_persons_and_events() cohort.insert_users_by_list(["p1"]) filter = Filter( - data={"properties": {"type": "OR", "values": [{"key": "id", "value": cohort.pk, "type": "static-cohort"}]}} + data={ + "properties": { + "type": "OR", + "values": [{"key": "id", "value": cohort.pk, "type": "static-cohort"}], + } + } ) q, params = CohortQuery(filter=filter, team=self.team).get_query() @@ -1717,11 +1942,17 @@ def test_static_cohort_filter(self): @snapshot_clickhouse_queries def test_static_cohort_filter_with_extra(self): - p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + p1 = _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort(team=self.team, name="cohort1", groups=[], is_static=True) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -1785,13 +2016,17 @@ def test_static_cohort_filter_with_extra(self): @snapshot_clickhouse_queries def test_performed_event_sequence(self): 
p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -1833,20 +2068,31 @@ def test_performed_event_sequence(self): @also_test_with_materialized_columns(event_properties=["$current_url"]) def test_performed_event_sequence_with_action(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) action1 = Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( - event="$pageview", action=action1, url="https://posthog.com/feedback/123", url_matching=ActionStep.EXACT + event="$pageview", + action=action1, + url="https://posthog.com/feedback/123", + url_matching=ActionStep.EXACT, ) _make_event_sequence( - self.team, "p1", 2, [1, 1], properties={"$current_url": "https://posthog.com/feedback/123"} + self.team, + "p1", + 2, + [1, 1], + properties={"$current_url": "https://posthog.com/feedback/123"}, ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -1887,13 +2133,17 @@ def test_performed_event_sequence_with_action(self): def test_performed_event_sequence_with_restarted(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + 
team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -1951,7 +2201,9 @@ def test_performed_event_sequence_with_restarted(self): def test_performed_event_sequence_with_extra_conditions(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) @@ -1973,7 +2225,9 @@ def test_performed_event_sequence_with_extra_conditions(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2025,7 +2279,9 @@ def test_performed_event_sequence_with_extra_conditions(self): @snapshot_clickhouse_queries def test_performed_event_sequence_with_person_properties(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) @@ -2047,7 +2303,9 @@ def test_performed_event_sequence_with_person_properties(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2059,7 +2317,9 @@ def 
test_performed_event_sequence_with_person_properties(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test22", "email": "test22@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test22", "email": "test22@posthog.com"}, ) _make_event_sequence(self.team, "p3", 2, [1, 1]) @@ -2109,7 +2369,11 @@ def test_performed_event_sequence_with_person_properties(self): "value": "performed_event_multiple", "type": "behavioral", }, - {"key": "email", "value": "test@posthog.com", "type": "person"}, # pushed down + { + "key": "email", + "value": "test@posthog.com", + "type": "person", + }, # pushed down ], } } @@ -2122,7 +2386,9 @@ def test_performed_event_sequence_with_person_properties(self): def test_multiple_performed_event_sequence(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _make_event_sequence(self.team, "p1", 2, [1, 1]) @@ -2144,7 +2410,9 @@ def test_multiple_performed_event_sequence(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2206,7 +2474,9 @@ def test_multiple_performed_event_sequence(self): @snapshot_clickhouse_queries def test_performed_event_sequence_and_clause_with_additional_event(self): p1 = _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2226,7 +2496,9 @@ def test_performed_event_sequence_and_clause_with_additional_event(self): ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], 
properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2277,11 +2549,17 @@ def test_performed_event_sequence_and_clause_with_additional_event(self): @snapshot_clickhouse_queries def test_unwrapping_static_cohort_filter_hidden_in_layers_of_cohorts(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort_static = _create_cohort(team=self.team, name="cohort static", groups=[], is_static=True) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( team=self.team, @@ -2346,7 +2624,11 @@ def test_unwrapping_static_cohort_filter_hidden_in_layers_of_cohorts(self): "properties": { "type": "OR", "values": [ - {"key": "id", "value": other_cohort.pk, "type": "cohort"}, # p3 fits in here + { + "key": "id", + "value": other_cohort.pk, + "type": "cohort", + }, # p3 fits in here { "key": "$pageview", "event_type": "events", @@ -2368,7 +2650,9 @@ def test_unwrapping_static_cohort_filter_hidden_in_layers_of_cohorts(self): def test_unwrap_with_negated_cohort(self): _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test2", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test2", "email": "test@posthog.com"}, ) _create_event( @@ -2387,7 +2671,9 @@ def test_unwrap_with_negated_cohort(self): ) p2 = _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": 
"test@posthog.com"}, ) _create_event( @@ -2399,7 +2685,9 @@ def test_unwrap_with_negated_cohort(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test2", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test2", "email": "test@posthog.com"}, ) _create_event( @@ -2446,8 +2734,18 @@ def test_unwrap_with_negated_cohort(self): "value": "performed_event", "type": "behavioral", }, - {"key": "name", "value": "test2", "type": "person", "negation": True}, - {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True}, + { + "key": "name", + "value": "test2", + "type": "person", + "negation": True, + }, + { + "key": "id", + "value": cohort1.pk, + "type": "cohort", + "negation": True, + }, ], } }, @@ -2470,7 +2768,9 @@ def test_unwrap_with_negated_cohort(self): def test_unwrap_multiple_levels(self): _create_person( - team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test2", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test2", "email": "test@posthog.com"}, ) _create_event( @@ -2489,7 +2789,9 @@ def test_unwrap_multiple_levels(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "test", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"name": "test", "email": "test@posthog.com"}, ) _create_event( @@ -2501,7 +2803,9 @@ def test_unwrap_multiple_levels(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "test2", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p3"], + properties={"name": "test2", "email": "test@posthog.com"}, ) _create_event( @@ -2513,7 +2817,9 @@ def test_unwrap_multiple_levels(self): ) p4 = _create_person( - team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "test3", "email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["p4"], + 
properties={"name": "test3", "email": "test@posthog.com"}, ) _create_event( @@ -2560,8 +2866,18 @@ def test_unwrap_multiple_levels(self): "value": "performed_event", "type": "behavioral", }, - {"key": "name", "value": "test2", "type": "person", "negation": True}, - {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True}, + { + "key": "name", + "value": "test2", + "type": "person", + "negation": True, + }, + { + "key": "id", + "value": cohort1.pk, + "type": "cohort", + "negation": True, + }, ], } }, @@ -2576,14 +2892,24 @@ def test_unwrap_multiple_levels(self): "type": "AND", "values": [ {"key": "name", "value": "test3", "type": "person"}, - {"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}, + { + "key": "id", + "value": cohort2.pk, + "type": "cohort", + "negation": True, + }, ], } }, ) filter = Filter( - data={"properties": {"type": "OR", "values": [{"key": "id", "value": cohort3.pk, "type": "cohort"}]}}, + data={ + "properties": { + "type": "OR", + "values": [{"key": "id", "value": cohort3.pk, "type": "cohort"}], + } + }, team=self.team, ) @@ -2595,7 +2921,6 @@ def test_unwrap_multiple_levels(self): class TestCohortNegationValidation(BaseTest): def test_basic_valid_negation_tree(self): - property_group = PropertyGroup( type=PropertyOperatorType.AND, values=[ @@ -2609,22 +2934,30 @@ def test_basic_valid_negation_tree(self): self.assertEqual(has_reg, True) def test_valid_negation_tree_with_extra_layers(self): - property_group = PropertyGroup( type=PropertyOperatorType.OR, values=[ PropertyGroup( - type=PropertyOperatorType.AND, values=[Property(key="name", value="test", type="person")] + type=PropertyOperatorType.AND, + values=[Property(key="name", value="test", type="person")], ), PropertyGroup( type=PropertyOperatorType.AND, values=[ PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + 
negation=True, + ) + ], ), PropertyGroup( - type=PropertyOperatorType.OR, values=[Property(key="email", value="xxx", type="person")] + type=PropertyOperatorType.OR, + values=[Property(key="email", value="xxx", type="person")], ), ], ), @@ -2636,23 +2969,37 @@ def test_valid_negation_tree_with_extra_layers(self): self.assertEqual(has_reg, True) def test_invalid_negation_tree_with_extra_layers(self): - property_group = PropertyGroup( type=PropertyOperatorType.OR, values=[ PropertyGroup( - type=PropertyOperatorType.AND, values=[Property(key="name", value="test", type="person")] + type=PropertyOperatorType.AND, + values=[Property(key="name", value="test", type="person")], ), PropertyGroup( type=PropertyOperatorType.AND, values=[ PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], ), PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], ), ], ), @@ -2664,21 +3011,37 @@ def test_invalid_negation_tree_with_extra_layers(self): self.assertEqual(has_reg, True) def test_valid_negation_tree_with_extra_layers_recombining_at_top(self): - property_group = PropertyGroup( type=PropertyOperatorType.AND, # top level AND protects the 2 negations from being invalid values=[ - PropertyGroup(type=PropertyOperatorType.OR, values=[Property(key="name", value="test", type="person")]), + PropertyGroup( + type=PropertyOperatorType.OR, + values=[Property(key="name", value="test", type="person")], + ), PropertyGroup( type=PropertyOperatorType.AND, values=[ PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], 
), PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], ), ], ), @@ -2690,7 +3053,6 @@ def test_valid_negation_tree_with_extra_layers_recombining_at_top(self): self.assertEqual(has_reg, True) def test_invalid_negation_tree_no_positive_filter(self): - property_group = PropertyGroup( type=PropertyOperatorType.AND, values=[ @@ -2703,11 +3065,25 @@ def test_invalid_negation_tree_no_positive_filter(self): values=[ PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], ), PropertyGroup( type=PropertyOperatorType.OR, - values=[Property(key="email", value="xxx", type="person", negation=True)], + values=[ + Property( + key="email", + value="xxx", + type="person", + negation=True, + ) + ], ), ], ), @@ -2719,7 +3095,6 @@ def test_invalid_negation_tree_no_positive_filter(self): self.assertEqual(has_reg, False) def test_empty_property_group(self): - property_group = PropertyGroup(type=PropertyOperatorType.AND, values=[]) # type: ignore has_pending_neg, has_reg = check_negation_clause(property_group) @@ -2727,9 +3102,9 @@ def test_empty_property_group(self): self.assertEqual(has_reg, False) def test_basic_invalid_negation_tree(self): - property_group = PropertyGroup( - type=PropertyOperatorType.AND, values=[Property(key="email", value="xxx", type="person", negation=True)] + type=PropertyOperatorType.AND, + values=[Property(key="email", value="xxx", type="person", negation=True)], ) has_pending_neg, has_reg = check_negation_clause(property_group) @@ -2737,9 +3112,9 @@ def test_basic_invalid_negation_tree(self): self.assertEqual(has_reg, False) def test_basic_valid_negation_tree_with_no_negations(self): - property_group = PropertyGroup( - 
type=PropertyOperatorType.AND, values=[Property(key="name", value="test", type="person")] + type=PropertyOperatorType.AND, + values=[Property(key="name", value="test", type="person")], ) has_pending_neg, has_reg = check_negation_clause(property_group) diff --git a/ee/clickhouse/queries/test/test_column_optimizer.py b/ee/clickhouse/queries/test/test_column_optimizer.py index 1ba0a221eadfe..e30ed400614e6 100644 --- a/ee/clickhouse/queries/test/test_column_optimizer.py +++ b/ee/clickhouse/queries/test/test_column_optimizer.py @@ -2,14 +2,24 @@ from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer from posthog.models import Action, ActionStep from posthog.models.filters import Filter, RetentionFilter -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, cleanup_materialized_columns +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + cleanup_materialized_columns, +) PROPERTIES_OF_ALL_TYPES = [ {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, {"key": "person_prop", "value": "efg", "type": "person"}, {"key": "id", "value": 1, "type": "cohort"}, {"key": "tag_name", "value": ["label"], "operator": "exact", "type": "element"}, - {"key": "group_prop", "value": ["value"], "operator": "exact", "type": "group", "group_type_index": 2}, + { + "key": "group_prop", + "value": ["value"], + "operator": "exact", + "type": "group", + "group_type_index": 2, + }, ] BASE_FILTER = Filter({"events": [{"id": "$pageview", "type": "events", "order": 0}]}) @@ -63,21 +73,32 @@ def test_properties_used_in_filter(self): self.assertEqual(properties_used_in_filter(filter), {}) filter = BASE_FILTER.shallow_clone( - {"breakdown": "some_prop", "breakdown_type": "group", "breakdown_group_type_index": 1} + { + "breakdown": "some_prop", + "breakdown_type": "group", + "breakdown_group_type_index": 1, + } ) self.assertEqual(properties_used_in_filter(filter), {("some_prop", "group", 1): 1}) # Funnel Correlation cases filter = 
BASE_FILTER.shallow_clone( - {"funnel_correlation_type": "events", "funnel_correlation_names": ["random_column"]} + { + "funnel_correlation_type": "events", + "funnel_correlation_names": ["random_column"], + } ) self.assertEqual(properties_used_in_filter(filter), {}) filter = BASE_FILTER.shallow_clone( - {"funnel_correlation_type": "properties", "funnel_correlation_names": ["random_column", "$browser"]} + { + "funnel_correlation_type": "properties", + "funnel_correlation_names": ["random_column", "$browser"], + } ) self.assertEqual( - properties_used_in_filter(filter), {("random_column", "person", None): 1, ("$browser", "person", None): 1} + properties_used_in_filter(filter), + {("random_column", "person", None): 1, ("$browser", "person", None): 1}, ) filter = BASE_FILTER.shallow_clone( @@ -88,7 +109,8 @@ def test_properties_used_in_filter(self): } ) self.assertEqual( - properties_used_in_filter(filter), {("random_column", "group", 2): 1, ("$browser", "group", 2): 1} + properties_used_in_filter(filter), + {("random_column", "group", 2): 1, ("$browser", "group", 2): 1}, ) filter = BASE_FILTER.shallow_clone({"funnel_correlation_type": "properties"}) @@ -135,13 +157,27 @@ def test_properties_used_in_filter(self): ) self.assertEqual(properties_used_in_filter(filter), {("$group_1", "event", None): 1}) - filter = Filter(data={"events": [{"id": "$pageview", "type": "events", "order": 0, "math": "unique_session"}]}) + filter = Filter( + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "unique_session", + } + ] + } + ) self.assertEqual(properties_used_in_filter(filter), {("$session_id", "event", None): 1}) def test_properties_used_in_filter_with_actions(self): action = Action.objects.create(team=self.team) ActionStep.objects.create( - event="$autocapture", action=action, url="https://example.com/donate", url_matching=ActionStep.EXACT + event="$autocapture", + action=action, + url="https://example.com/donate", + 
url_matching=ActionStep.EXACT, ) ActionStep.objects.create( action=action, @@ -205,7 +241,12 @@ def test_materialized_columns_checks_person_on_events(self): "type": "group", "group_type_index": 0, }, - {"key": "person_prop", "value": ["value"], "operator": "exact", "type": "person"}, + { + "key": "person_prop", + "value": ["value"], + "operator": "exact", + "type": "person", + }, ] } ), @@ -213,23 +254,35 @@ def test_materialized_columns_checks_person_on_events(self): ) self.assertEqual(optimizer().person_on_event_columns_to_query, {"person_properties"}) - self.assertEqual(optimizer().group_on_event_columns_to_query, {"group0_properties", "group2_properties"}) + self.assertEqual( + optimizer().group_on_event_columns_to_query, + {"group0_properties", "group2_properties"}, + ) # materialising the props on `person` or `group` table should make no difference materialize("person", "person_prop") materialize("groups", "group_prop", table_column="group_properties") self.assertEqual(optimizer().person_on_event_columns_to_query, {"person_properties"}) - self.assertEqual(optimizer().group_on_event_columns_to_query, {"group0_properties", "group2_properties"}) + self.assertEqual( + optimizer().group_on_event_columns_to_query, + {"group0_properties", "group2_properties"}, + ) materialize("events", "person_prop", table_column="person_properties") materialize("events", "group_prop", table_column="group0_properties") self.assertEqual(optimizer().person_on_event_columns_to_query, {"mat_pp_person_prop"}) - self.assertEqual(optimizer().group_on_event_columns_to_query, {"mat_gp0_group_prop", "group2_properties"}) + self.assertEqual( + optimizer().group_on_event_columns_to_query, + {"mat_gp0_group_prop", "group2_properties"}, + ) materialize("events", "group_prop", table_column="group2_properties") - self.assertEqual(optimizer().group_on_event_columns_to_query, {"mat_gp0_group_prop", "mat_gp2_group_prop"}) + self.assertEqual( + optimizer().group_on_event_columns_to_query, + 
{"mat_gp0_group_prop", "mat_gp2_group_prop"}, + ) def test_should_query_element_chain_column(self): should_query_elements_chain_column = lambda filter: EnterpriseColumnOptimizer( @@ -241,25 +294,46 @@ def test_should_query_element_chain_column(self): self.assertEqual(should_query_elements_chain_column(FILTER_WITH_GROUPS), True) filter = Filter( - data={"events": [{"id": "$pageview", "type": "events", "order": 0, "properties": PROPERTIES_OF_ALL_TYPES}]} + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "properties": PROPERTIES_OF_ALL_TYPES, + } + ] + } ) self.assertEqual(should_query_elements_chain_column(filter), True) def test_should_query_element_chain_column_with_actions(self): action = Action.objects.create(team=self.team) ActionStep.objects.create( - event="$autocapture", action=action, url="https://example.com/donate", url_matching=ActionStep.EXACT + event="$autocapture", + action=action, + url="https://example.com/donate", + url_matching=ActionStep.EXACT, ) filter = Filter(data={"actions": [{"id": action.id, "math": "dau"}]}) - self.assertEqual(EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, False) + self.assertEqual( + EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, + False, + ) ActionStep.objects.create(action=action, event="$autocapture", tag_name="button", text="Pay $10") - self.assertEqual(EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, True) + self.assertEqual( + EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, + True, + ) filter = BASE_FILTER.shallow_clone({"exclusions": [{"id": action.id, "type": "actions"}]}) - self.assertEqual(EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, True) + self.assertEqual( + EnterpriseColumnOptimizer(filter, self.team.id).should_query_elements_chain_column, + True, + ) def test_group_types_to_query(self): 
group_types_to_query = lambda filter: EnterpriseColumnOptimizer(filter, self.team.id).group_types_to_query diff --git a/ee/clickhouse/queries/test/test_event_query.py b/ee/clickhouse/queries/test/test_event_query.py index bc75851d778d1..767dc237bd252 100644 --- a/ee/clickhouse/queries/test/test_event_query.py +++ b/ee/clickhouse/queries/test/test_event_query.py @@ -38,7 +38,12 @@ def _create_sample_data(self): distinct_id = "user_one_{}".format(self.team.pk) _create_person(distinct_ids=[distinct_id], team=self.team) - _create_event(event="viewed", distinct_id=distinct_id, team=self.team, timestamp="2021-05-01 00:00:00") + _create_event( + event="viewed", + distinct_id=distinct_id, + team=self.team, + timestamp="2021-05-01 00:00:00", + ) def _run_query(self, filter: Filter, entity=None): entity = entity or filter.entities[0] @@ -73,7 +78,12 @@ def test_person_properties_filter(self): "date_to": "2021-05-07 00:00:00", "events": [{"id": "viewed", "order": 0}], "properties": [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}, + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + }, {"key": "key", "value": "val"}, ], } @@ -88,14 +98,23 @@ def test_person_properties_filter(self): "id": "viewed", "type": "events", "properties": [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}, + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + }, {"key": "key", "value": "val"}, ], } ) filter = Filter( - data={"date_from": "2021-05-01 00:00:00", "date_to": "2021-05-07 00:00:00", "events": [entity.to_dict()]} + data={ + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-07 00:00:00", + "events": [entity.to_dict()], + } ) self._run_query(filter, entity) @@ -107,7 +126,14 @@ def test_event_properties_filter(self): "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-07 00:00:00", "events": [{"id": 
"viewed", "order": 0}], - "properties": [{"key": "some_key", "value": "test_val", "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "some_key", + "value": "test_val", + "operator": "exact", + "type": "event", + } + ], } ) @@ -127,7 +153,14 @@ def test_event_properties_filter(self): { "id": "viewed", "type": "events", - "properties": [{"key": "some_key", "value": "test_val", "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "some_key", + "value": "test_val", + "operator": "exact", + "type": "event", + } + ], } ) @@ -167,16 +200,30 @@ def test_entity_filtered_by_cohort(self): "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-07 00:00:00", "events": [ - {"id": "$pageview", "order": 0, "properties": [{"key": "id", "type": "cohort", "value": cohort.pk}]} + { + "id": "$pageview", + "order": 0, + "properties": [{"key": "id", "type": "cohort", "value": cohort.pk}], + } ], } ) Person.objects.create(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test"}) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-02T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-02T12:00:00Z", + ) Person.objects.create(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "foo"}) - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-02T12:01:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-02T12:01:00Z", + ) self._run_query(filter) @@ -218,7 +265,11 @@ def test_account_filters(self): self.team.save() filter = Filter( - data={"events": [{"id": "event_name", "order": 0}], "filter_test_accounts": True}, team=self.team + data={ + "events": [{"id": "event_name", "order": 0}], + "filter_test_accounts": True, + }, + team=self.team, ) self._run_query(filter) @@ -233,7 +284,9 @@ def test_action_with_person_property_filter(self): action = 
Action.objects.create(team=self.team, name="action1") ActionStep.objects.create( - event="event_name", action=action, properties=[{"key": "name", "type": "person", "value": "John"}] + event="event_name", + action=action, + properties=[{"key": "name", "type": "person", "value": "John"}], ) filter = Filter(data={"actions": [{"id": action.id, "type": "actions", "order": 0}]}) @@ -298,7 +351,12 @@ def test_element(self): nth_child=1, nth_of_type=0, ), - Element(tag_name="button", attr_class=["btn", "btn-primary"], nth_child=0, nth_of_type=0), + Element( + tag_name="button", + attr_class=["btn", "btn-primary"], + nth_child=0, + nth_of_type=0, + ), Element(tag_name="div", nth_child=0, nth_of_type=0), Element(tag_name="label", nth_child=0, nth_of_type=0, attr_id="nested"), ], @@ -318,7 +376,12 @@ def test_element(self): nth_child=1, nth_of_type=0, ), - Element(tag_name="button", attr_class=["btn", "btn-secondary"], nth_child=0, nth_of_type=0), + Element( + tag_name="button", + attr_class=["btn", "btn-secondary"], + nth_child=0, + nth_of_type=0, + ), Element(tag_name="div", nth_child=0, nth_of_type=0), Element(tag_name="img", nth_child=0, nth_of_type=0, attr_id="nested"), ], @@ -327,7 +390,14 @@ def test_element(self): filter = Filter( data={ "events": [{"id": "event_name", "order": 0}], - "properties": [{"key": "tag_name", "value": ["label"], "operator": "exact", "type": "element"}], + "properties": [ + { + "key": "tag_name", + "value": ["label"], + "operator": "exact", + "type": "element", + } + ], } ) @@ -335,7 +405,16 @@ def test_element(self): self._run_query( filter.shallow_clone( - {"properties": [{"key": "tag_name", "value": [], "operator": "exact", "type": "element"}]} + { + "properties": [ + { + "key": "tag_name", + "value": [], + "operator": "exact", + "type": "element", + } + ] + } ) ) @@ -343,9 +422,24 @@ def _create_groups_test_data(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) 
GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={"another": "value"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={"another": "value"}, + ) Person.objects.create(team_id=self.team.pk, distinct_ids=["p1"], properties={"$browser": "test"}) Person.objects.create(team_id=self.team.pk, distinct_ids=["p2"], properties={"$browser": "foobar"}) @@ -390,8 +484,18 @@ def test_groups_filters(self): "date_to": "2020-01-12T00:00:00Z", "events": [{"id": "$pageview", "type": "events", "order": 0}], "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, - {"key": "another", "value": "value", "type": "group", "group_type_index": 1}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, + { + "key": "another", + "value": "value", + "type": "group", + "group_type_index": 1, + }, ], }, team=self.team, @@ -410,7 +514,12 @@ def test_groups_filters_mixed(self): "date_to": "2020-01-12T00:00:00Z", "events": [{"id": "$pageview", "type": "events", "order": 0}], "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, {"key": "$browser", "value": "test", "type": "person"}, ], }, @@ -422,7 +531,6 @@ def 
test_groups_filters_mixed(self): @snapshot_clickhouse_queries def test_entity_filtered_by_session_duration(self): - filter = Filter( data={ "date_from": "2021-05-02 00:00:00", @@ -431,7 +539,14 @@ def test_entity_filtered_by_session_duration(self): { "id": "$pageview", "order": 0, - "properties": [{"key": "$session_duration", "type": "session", "operator": "gt", "value": 90}], + "properties": [ + { + "key": "$session_duration", + "type": "session", + "operator": "gt", + "value": 90, + } + ], } ], } @@ -473,7 +588,12 @@ def test_entity_filtered_by_session_duration(self): ) # Event with no session - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2021-05-02 00:02:00") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2021-05-02 00:02:00", + ) results, _ = self._run_query(filter) self.assertEqual(len(results), 1) @@ -481,7 +601,6 @@ def test_entity_filtered_by_session_duration(self): @snapshot_clickhouse_queries def test_entity_filtered_by_multiple_session_duration_filters(self): - filter = Filter( data={ "date_from": "2021-05-02 00:00:00", @@ -491,8 +610,18 @@ def test_entity_filtered_by_multiple_session_duration_filters(self): "id": "$pageview", "order": 0, "properties": [ - {"key": "$session_duration", "type": "session", "operator": "gt", "value": 90}, - {"key": "$session_duration", "type": "session", "operator": "lt", "value": 150}, + { + "key": "$session_duration", + "type": "session", + "operator": "gt", + "value": 90, + }, + { + "key": "$session_duration", + "type": "session", + "operator": "lt", + "value": 150, + }, ], } ], @@ -555,7 +684,6 @@ def test_entity_filtered_by_multiple_session_duration_filters(self): @snapshot_clickhouse_queries def test_unique_session_math_filtered_by_session_duration(self): - filter = Filter( data={ "date_from": "2021-05-02 00:00:00", @@ -565,7 +693,14 @@ def test_unique_session_math_filtered_by_session_duration(self): "id": "$pageview", "math": "unique_session", 
"order": 0, - "properties": [{"key": "$session_duration", "type": "session", "operator": "gt", "value": 30}], + "properties": [ + { + "key": "$session_duration", + "type": "session", + "operator": "gt", + "value": 30, + } + ], } ], } diff --git a/ee/clickhouse/queries/test/test_groups_join_query.py b/ee/clickhouse/queries/test/test_groups_join_query.py index 2b14442d8fa02..1564cf8f50a50 100644 --- a/ee/clickhouse/queries/test/test_groups_join_query.py +++ b/ee/clickhouse/queries/test/test_groups_join_query.py @@ -10,7 +10,16 @@ def test_groups_join_query_blank(): def test_groups_join_query_filtering(snapshot): filter = Filter( - data={"properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}]} + data={ + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ] + } ) assert GroupsJoinQuery(filter, 2).get_join_query() == snapshot @@ -20,8 +29,18 @@ def test_groups_join_query_filtering_with_custom_key_names(snapshot): filter = Filter( data={ "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, - {"key": "company", "value": "crashed", "type": "group", "group_type_index": 2}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, + { + "key": "company", + "value": "crashed", + "type": "group", + "group_type_index": 2, + }, ] } ) diff --git a/ee/clickhouse/queries/test/test_lifecycle.py b/ee/clickhouse/queries/test/test_lifecycle.py index af43cb05d2655..ea0561fc8acb8 100644 --- a/ee/clickhouse/queries/test/test_lifecycle.py +++ b/ee/clickhouse/queries/test/test_lifecycle.py @@ -36,8 +36,18 @@ def test_test_account_filters_with_groups(self): self.team.save() GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) - create_group(self.team.pk, group_type_index=0, group_key="in", properties={"key": "value"}) - create_group(self.team.pk, group_type_index=0, 
group_key="out", properties={"key": "othervalue"}) + create_group( + self.team.pk, + group_type_index=0, + group_key="in", + properties={"key": "value"}, + ) + create_group( + self.team.pk, + group_type_index=0, + group_key="out", + properties={"key": "othervalue"}, + ) with freeze_time("2020-01-11T12:00:00Z"): Person.objects.create(distinct_ids=["person1"], team_id=self.team.pk) @@ -48,12 +58,28 @@ def test_test_account_filters_with_groups(self): journeys_for( { "person1": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 11, 12), "properties": {"$group_0": "out"}} + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 11, 12), + "properties": {"$group_0": "out"}, + } ], "person2": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 9, 12), "properties": {"$group_0": "in"}}, - {"event": "$pageview", "timestamp": datetime(2020, 1, 12, 12), "properties": {"$group_0": "in"}}, - {"event": "$pageview", "timestamp": datetime(2020, 1, 15, 12), "properties": {"$group_0": "in"}}, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 9, 12), + "properties": {"$group_0": "in"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 12, 12), + "properties": {"$group_0": "in"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 15, 12), + "properties": {"$group_0": "in"}, + }, ], }, self.team, @@ -169,7 +195,10 @@ def test_interval_dates_weeks(self): {"status": "returning", "data": [1] * 5}, ], ) - self.assertEqual(result[0]["days"], ["2021-04-05", "2021-04-12", "2021-04-19", "2021-04-26", "2021-05-03"]) + self.assertEqual( + result[0]["days"], + ["2021-04-05", "2021-04-12", "2021-04-19", "2021-04-26", "2021-05-03"], + ) @snapshot_clickhouse_queries def test_interval_dates_months(self): @@ -199,7 +228,10 @@ def test_lifecycle_hogql_event_properties(self): "date_from": "-7d", "interval": "day", "properties": [ - {"key": "like(properties.$current_url, '%example%') and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": 
"like(properties.$current_url, '%example%') and 'bla' != 'a%sd'", + "type": "hogql", + }, ], } ) @@ -223,7 +255,10 @@ def test_lifecycle_hogql_person_properties(self): "date_from": "-7d", "interval": "day", "properties": [ - {"key": "like(person.properties.email, '%test.com')", "type": "hogql"}, + { + "key": "like(person.properties.email, '%test.com')", + "type": "hogql", + }, ], } ) @@ -241,7 +276,9 @@ def test_lifecycle_hogql_person_properties(self): def _setup_returning_lifecycle_data(self, days): with freeze_time("2019-01-01T12:00:00Z"): Person.objects.create( - distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "person@test.com"} + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"email": "person@test.com"}, ) journeys_for( @@ -261,7 +298,11 @@ def _setup_returning_lifecycle_data(self, days): def _run_lifecycle(self, data): filter = Filter( - data={"events": [{"id": "$pageview", "type": "events", "order": 0}], "shown_as": TRENDS_LIFECYCLE, **data}, + data={ + "events": [{"id": "$pageview", "type": "events", "order": 0}], + "shown_as": TRENDS_LIFECYCLE, + **data, + }, team=self.team, ) return Trends().run(filter, self.team) diff --git a/ee/clickhouse/queries/test/test_paths.py b/ee/clickhouse/queries/test/test_paths.py index 03e4203092377..fdaf25a043a6d 100644 --- a/ee/clickhouse/queries/test/test_paths.py +++ b/ee/clickhouse/queries/test/test_paths.py @@ -19,7 +19,9 @@ from posthog.models.instance_setting import override_instance_config from posthog.queries.paths import Paths, PathsActors from posthog.queries.paths.paths_event_query import PathEventQuery -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -36,24 +38,52 @@ class TestClickhousePaths(ClickhouseTestMixin, APIBaseTest): - maxDiff = None def 
_create_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) create_group( - team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) - def _get_people_at_path(self, filter, path_start=None, path_end=None, funnel_filter=None, path_dropoff=None): + def _get_people_at_path( + self, + filter, + path_start=None, + path_end=None, + funnel_filter=None, + path_dropoff=None, + ): person_filter = filter.shallow_clone( - {"path_start_key": path_start, "path_end_key": path_end, "path_dropoff_key": path_dropoff} + { + "path_start_key": path_start, + "path_end_key": path_end, + "path_dropoff_key": path_dropoff, + } ) _, serialized_actors, _ = PathsActors(person_filter, self.team, funnel_filter).get_actors() return [row["id"] for row in serialized_actors] @@ -62,41 +92,57 @@ def _get_people_at_path(self, filter, path_start=None, path_end=None, funnel_fil def test_step_limit(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["fake"]) - _create_event( - properties={"$current_url": "/1"}, - 
distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), - _create_event( - properties={"$current_url": "/4"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:27:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/4"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:27:34", + ), + ) with freeze_time("2012-01-7T03:21:34.000Z"): filter = PathFilter(team=self.team, data={"step_limit": 2}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( - response, [{"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}] + response, + [ + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + } + ], ) self.assertEqual([p1.uuid], self._get_people_at_path(filter, "1_/1", "2_/2")) self.assertEqual([], self._get_people_at_path(filter, "2_/2", "3_/3")) @@ -108,8 +154,18 @@ def test_step_limit(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": 
"3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) self.assertEqual([p1.uuid], self._get_people_at_path(filter, "2_/2", "3_/3")) @@ -121,9 +177,24 @@ def test_step_limit(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/4", "value": 1, "average_conversion_time": 3 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/4", + "value": 1, + "average_conversion_time": 3 * ONE_MINUTE, + }, ], ) self.assertEqual([p1.uuid], self._get_people_at_path(filter, "1_/1", "2_/2")) @@ -135,70 +206,104 @@ def test_step_limit(self): def test_step_conversion_times(self): _create_person(team_id=self.team.pk, distinct_ids=["fake"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), - _create_event( - properties={"$current_url": "/4"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:27:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + 
team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/4"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:27:34", + ), + ) _create_person(team_id=self.team.pk, distinct_ids=["fake2"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:23:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:27:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:23:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:27:34", + ), + ) filter = PathFilter( - team=self.team, data={"step_limit": 4, "date_from": "2012-01-01", "include_event_types": ["$pageview"]} + team=self.team, + data={ + "step_limit": 4, + "date_from": "2012-01-01", + "include_event_types": ["$pageview"], + }, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 2, 
"average_conversion_time": 1.5 * ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 2, "average_conversion_time": 3 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/4", "value": 1, "average_conversion_time": 3 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 2, + "average_conversion_time": 1.5 * ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 2, + "average_conversion_time": 3 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/4", + "value": 1, + "average_conversion_time": 3 * ONE_MINUTE, + }, ], ) @@ -246,15 +351,34 @@ def test_event_ordering(self): filter = PathFilter( team=self.team, - data={"date_from": "2021-05-01", "date_to": "2021-05-03", "include_event_types": ["custom_event"]}, + data={ + "date_from": "2021-05-01", + "date_to": "2021-05-03", + "include_event_types": ["custom_event"], + }, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_step one", "target": "2_step two", "value": 50, "average_conversion_time": 60000.0}, - {"source": "2_step two", "target": "3_step three", "value": 50, "average_conversion_time": 60000.0}, - {"source": "3_step three", "target": "4_step branch", "value": 25, "average_conversion_time": 60000.0}, + { + "source": "1_step one", + "target": "2_step two", + "value": 50, + "average_conversion_time": 60000.0, + }, + { + "source": "2_step two", + "target": "3_step three", + "value": 50, + "average_conversion_time": 60000.0, + }, + { + "source": "3_step three", + "target": "4_step branch", + "value": 25, + "average_conversion_time": 60000.0, + }, ], ) @@ -444,7 +568,12 @@ def test_wildcard_groups(self): "value": 10, "average_conversion_time": 160000, }, - {"source": "3_step two", "target": "4_step three", "value": 5, "average_conversion_time": ONE_MINUTE}, + { + "source": "3_step two", + "target": "4_step three", + "value": 5, + "average_conversion_time": ONE_MINUTE, + }, ], ) @@ -454,87 
+583,105 @@ def test_team_path_cleaning_rules(self): _create_person(distinct_ids=[f"user_2"], team=self.team) _create_person(distinct_ids=[f"user_3"], team=self.team) - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value3"}, - } - ), + ( + _create_event( + **{ + "event": 
"$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value3"}, + } + ), + ) self.team.path_cleaning_filters = [{"alias": "?", "regex": "\\?(.*)"}] self.team.save() @@ -590,87 +737,105 @@ def test_team_path_cleaning_rules(self): 
@snapshot_clickhouse_queries def test_team_and_local_path_cleaning_rules(self): - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2/5"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step2/5?key=value1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2/5"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step2/5?key=value2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2/5"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step2/5?key=value3"}, - } - ), + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": 
"2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2/5"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step2/5?key=value1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2/5"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step2/5?key=value2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2/5"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step2/5?key=value3"}, + } + ), + ) _create_person(distinct_ids=[f"user_1"], team=self.team) @@ -739,109 +904,127 @@ def test_path_cleaning_rules_with_wildcard_groups(self): _create_person(distinct_ids=[f"user_2"], team=self.team) _create_person(distinct_ids=[f"user_3"], 
team=self.team) - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1/foo"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_1", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value1"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1/bar"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_2", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:00:00", - "properties": {"$current_url": "test.com/step1/baz"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:01:00", - "properties": {"$current_url": "test.com/step2"}, - } - ), - _create_event( - **{ - "event": "$pageview", - "distinct_id": f"user_3", - "team": self.team, - "timestamp": "2021-05-01 00:02:00", - "properties": {"$current_url": "test.com/step3?key=value3"}, - } - ), - - data = { - "insight": INSIGHT_FUNNELS, - "include_event_types": ["$pageview"], - "path_groupings": ["/step1"], - "date_from": "2021-05-01 00:00:00", - "date_to": "2021-05-07 
00:00:00", - "local_path_cleaning_filters": [{"alias": "?", "regex": "\\?(.*)"}], - "start_point": "/step1", - } - path_filter = PathFilter(data=data, team=self.team) - response = Paths(team=self.team, filter=path_filter).run() - - self.assertEqual( - response, - [ - { - "source": "1_/step1", - "target": "2_test.com/step2", - "value": 3, - "average_conversion_time": 60000.0, - }, + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1/foo"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_1", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value1"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1/bar"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": {"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_2", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + "properties": {"$current_url": "test.com/step1/baz"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:01:00", + "properties": 
{"$current_url": "test.com/step2"}, + } + ), + ) + ( + _create_event( + **{ + "event": "$pageview", + "distinct_id": f"user_3", + "team": self.team, + "timestamp": "2021-05-01 00:02:00", + "properties": {"$current_url": "test.com/step3?key=value3"}, + } + ), + ) + + data = { + "insight": INSIGHT_FUNNELS, + "include_event_types": ["$pageview"], + "path_groupings": ["/step1"], + "date_from": "2021-05-01 00:00:00", + "date_to": "2021-05-07 00:00:00", + "local_path_cleaning_filters": [{"alias": "?", "regex": "\\?(.*)"}], + "start_point": "/step1", + } + path_filter = PathFilter(data=data, team=self.team) + response = Paths(team=self.team, filter=path_filter).run() + + self.assertEqual( + response, + [ + { + "source": "1_/step1", + "target": "2_test.com/step2", + "value": 3, + "average_conversion_time": 60000.0, + }, { "source": "2_test.com/step2", "target": "3_test.com/step3?", @@ -875,7 +1058,12 @@ def test_by_funnel_after_dropoff(self): self.assertEqual( response, [ - {"source": "1_step one", "target": "2_step dropoff1", "value": 20, "average_conversion_time": 60000.0}, + { + "source": "1_step one", + "target": "2_step dropoff1", + "value": 20, + "average_conversion_time": 60000.0, + }, { "source": "2_step dropoff1", "target": "3_step dropoff2", @@ -890,15 +1078,21 @@ def test_by_funnel_after_dropoff(self): }, ], ) - self.assertEqual(20, len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter))) self.assertEqual( - 20, len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)) + 20, + len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter)), + ) + self.assertEqual( + 20, + len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)), ) self.assertEqual( - 10, len(self._get_people_at_path(path_filter, "3_step dropoff2", "4_step branch", funnel_filter)) + 10, + len(self._get_people_at_path(path_filter, "3_step dropoff2", "4_step 
branch", funnel_filter)), ) self.assertEqual( - 0, len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)) + 0, + len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)), ) @snapshot_clickhouse_queries @@ -923,13 +1117,27 @@ def test_by_funnel_after_dropoff_with_group_filter(self): funnel_filter = Filter(data=data, team=self.team) # passing group properties to funnel filter defeats purpose of test path_filter = PathFilter(data=data, team=self.team).shallow_clone( - {"properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}]} + { + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } + ] + } ) response = Paths(team=self.team, filter=path_filter, funnel_filter=funnel_filter).run() self.assertEqual( response, [ - {"source": "1_step one", "target": "2_step dropoff1", "value": 20, "average_conversion_time": 60000.0}, + { + "source": "1_step one", + "target": "2_step dropoff1", + "value": 20, + "average_conversion_time": 60000.0, + }, { "source": "2_step dropoff1", "target": "3_step dropoff2", @@ -944,15 +1152,21 @@ def test_by_funnel_after_dropoff_with_group_filter(self): }, ], ) - self.assertEqual(20, len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter))) self.assertEqual( - 20, len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)) + 20, + len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter)), ) self.assertEqual( - 10, len(self._get_people_at_path(path_filter, "3_step dropoff2", "4_step branch", funnel_filter)) + 20, + len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)), ) self.assertEqual( - 0, len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)) + 10, + len(self._get_people_at_path(path_filter, 
"3_step dropoff2", "4_step branch", funnel_filter)), + ) + self.assertEqual( + 0, + len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)), ) def test_by_funnel_after_step_respects_conversion_window(self): @@ -1107,15 +1321,21 @@ def test_by_funnel_after_step_respects_conversion_window(self): }, ], ) - self.assertEqual(20, len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter))) self.assertEqual( - 20, len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)) + 20, + len(self._get_people_at_path(path_filter, "1_step one", "2_step dropoff1", funnel_filter)), + ) + self.assertEqual( + 20, + len(self._get_people_at_path(path_filter, "2_step dropoff1", "3_step dropoff2", funnel_filter)), ) self.assertEqual( - 10, len(self._get_people_at_path(path_filter, "3_step dropoff2", "4_step branch", funnel_filter)) + 10, + len(self._get_people_at_path(path_filter, "3_step dropoff2", "4_step branch", funnel_filter)), ) self.assertEqual( - 0, len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)) + 0, + len(self._get_people_at_path(path_filter, "4_step branch", "3_step dropoff2", funnel_filter)), ) @snapshot_clickhouse_queries @@ -1154,7 +1374,12 @@ def test_by_funnel_after_step(self): "value": 10, "average_conversion_time": 80000.0, }, - {"source": "1_step two", "target": "2_step three", "value": 5, "average_conversion_time": 60000.0}, + { + "source": "1_step two", + "target": "2_step three", + "value": 5, + "average_conversion_time": 60000.0, + }, ], ) @@ -1232,7 +1457,12 @@ def test_by_funnel_after_step_limit(self): self.assertEqual( response, [ - {"source": "1_step two", "target": "2_step three", "value": 105, "average_conversion_time": 60000.0}, + { + "source": "1_step two", + "target": "2_step three", + "value": 105, + "average_conversion_time": 60000.0, + }, { "source": "1_step two", "target": "2_between_step_2_a", @@ 
-1413,19 +1643,38 @@ def test_by_funnel_between_step(self): ], ) self.assertEqual( - 15, len(self._get_people_at_path(path_filter, "1_step one", "2_between_step_1_a", funnel_filter)) + 15, + len(self._get_people_at_path(path_filter, "1_step one", "2_between_step_1_a", funnel_filter)), ) self.assertEqual( - 15, len(self._get_people_at_path(path_filter, "2_between_step_1_a", "3_between_step_1_b", funnel_filter)) + 15, + len( + self._get_people_at_path( + path_filter, + "2_between_step_1_a", + "3_between_step_1_b", + funnel_filter, + ) + ), ) self.assertEqual( - 10, len(self._get_people_at_path(path_filter, "3_between_step_1_b", "4_step two", funnel_filter)) + 10, + len(self._get_people_at_path(path_filter, "3_between_step_1_b", "4_step two", funnel_filter)), ) self.assertEqual( - 5, len(self._get_people_at_path(path_filter, "3_between_step_1_b", "4_between_step_1_c", funnel_filter)) + 5, + len( + self._get_people_at_path( + path_filter, + "3_between_step_1_b", + "4_between_step_1_c", + funnel_filter, + ) + ), ) self.assertEqual( - 5, len(self._get_people_at_path(path_filter, "4_between_step_1_c", "5_step two", funnel_filter)) + 5, + len(self._get_people_at_path(path_filter, "4_between_step_1_c", "5_step two", funnel_filter)), ) @also_test_with_materialized_columns(["$current_url", "$screen_name"]) @@ -1549,13 +1798,48 @@ def test_end(self): self.assertEqual( response, [ - {"source": "1_/2", "target": "2_/3", "value": 1, "average_conversion_time": 60000.0}, - {"source": "1_/3", "target": "2_/4", "value": 1, "average_conversion_time": 60000.0}, - {"source": "1_/5", "target": "2_/about", "value": 1, "average_conversion_time": 60000.0}, - {"source": "2_/3", "target": "3_/4", "value": 1, "average_conversion_time": 60000.0}, - {"source": "2_/4", "target": "3_/about", "value": 1, "average_conversion_time": 60000.0}, - {"source": "3_/4", "target": "4_/5", "value": 1, "average_conversion_time": 60000.0}, - {"source": "4_/5", "target": "5_/about", "value": 1, 
"average_conversion_time": 60000.0}, + { + "source": "1_/2", + "target": "2_/3", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "1_/3", + "target": "2_/4", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "1_/5", + "target": "2_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "2_/3", + "target": "3_/4", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "2_/4", + "target": "3_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "3_/4", + "target": "4_/5", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "4_/5", + "target": "5_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, ], ) @@ -1573,13 +1857,48 @@ def test_end(self): self.assertEqual( response, [ - {"source": "1_/2", "target": "2_/3", "value": 1, "average_conversion_time": 60000.0}, - {"source": "1_/3", "target": "2_/4", "value": 1, "average_conversion_time": 60000.0}, - {"source": "1_/5", "target": "2_/about", "value": 1, "average_conversion_time": 60000.0}, - {"source": "2_/3", "target": "3_/4", "value": 1, "average_conversion_time": 60000.0}, - {"source": "2_/4", "target": "3_/about", "value": 1, "average_conversion_time": 60000.0}, - {"source": "3_/4", "target": "4_/5", "value": 1, "average_conversion_time": 60000.0}, - {"source": "4_/5", "target": "5_/about", "value": 1, "average_conversion_time": 60000.0}, + { + "source": "1_/2", + "target": "2_/3", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "1_/3", + "target": "2_/4", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "1_/5", + "target": "2_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "2_/3", + "target": "3_/4", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "2_/4", + "target": "3_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "3_/4", 
+ "target": "4_/5", + "value": 1, + "average_conversion_time": 60000.0, + }, + { + "source": "4_/5", + "target": "5_/about", + "value": 1, + "average_conversion_time": 60000.0, + }, ], ) @@ -1641,23 +1960,53 @@ def test_event_inclusion_exclusion_filters(self): # P3 for custom event _create_person(team_id=self.team.pk, distinct_ids=["p3"]) p3 = [ - _create_event(distinct_id="p3", event="/custom1", team=self.team, timestamp="2012-01-01 03:21:34"), - _create_event(distinct_id="p3", event="/custom2", team=self.team, timestamp="2012-01-01 03:22:34"), - _create_event(distinct_id="p3", event="/custom3", team=self.team, timestamp="2012-01-01 03:24:34"), + _create_event( + distinct_id="p3", + event="/custom1", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + _create_event( + distinct_id="p3", + event="/custom2", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + _create_event( + distinct_id="p3", + event="/custom3", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), ] _ = [*p1, *p2, *p3] filter = PathFilter( - team=self.team, data={"step_limit": 4, "date_from": "2012-01-01", "include_event_types": ["$pageview"]} + team=self.team, + data={ + "step_limit": 4, + "date_from": "2012-01-01", + "include_event_types": ["$pageview"], + }, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1667,8 +2016,18 @@ def test_event_inclusion_exclusion_filters(self): self.assertEqual( response, [ - {"source": "1_/screen1", "target": "2_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - 
{"source": "2_/screen2", "target": "3_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/screen1", + "target": "2_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/screen2", + "target": "3_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1678,17 +2037,39 @@ def test_event_inclusion_exclusion_filters(self): self.assertEqual( response, [ - {"source": "1_/custom1", "target": "2_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/custom2", "target": "3_/custom3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/custom1", + "target": "2_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/custom2", + "target": "3_/custom3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) - filter = filter.shallow_clone({"include_event_types": [], "include_custom_events": ["/custom1", "/custom2"]}) + filter = filter.shallow_clone( + { + "include_event_types": [], + "include_custom_events": ["/custom1", "/custom2"], + } + ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, - [{"source": "1_/custom1", "target": "2_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}], + [ + { + "source": "1_/custom1", + "target": "2_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + } + ], ) filter = filter.shallow_clone({"include_event_types": [], "include_custom_events": ["/custom3", "blah"]}) @@ -1697,19 +2078,52 @@ def test_event_inclusion_exclusion_filters(self): self.assertEqual(response, []) filter = filter.shallow_clone( - {"include_event_types": ["$pageview", "$screen", "custom_event"], "include_custom_events": []} + { + "include_event_types": ["$pageview", "$screen", "custom_event"], + "include_custom_events": [], + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, 
filter=filter) self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "1_/custom1", "target": "2_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "1_/screen1", "target": "2_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "2_/custom2", "target": "3_/custom3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "2_/screen2", "target": "3_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "1_/custom1", + "target": "2_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "1_/screen1", + "target": "2_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "2_/custom2", + "target": "3_/custom3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "2_/screen2", + "target": "3_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1724,9 +2138,24 @@ def test_event_inclusion_exclusion_filters(self): self.assertEqual( response, [ - {"source": "1_/custom2", "target": "2_/custom3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "1_/screen1", "target": "2_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/screen2", "target": "3_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/custom2", + "target": "2_/custom3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "1_/screen1", + "target": "2_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { 
+ "source": "2_/screen2", + "target": "3_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1826,7 +2255,15 @@ def test_event_exclusion_filters_with_wildcard_groups(self): response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( - response, [{"source": "1_/1", "target": "2_/3", "value": 3, "average_conversion_time": 3 * ONE_MINUTE}] + response, + [ + { + "source": "1_/1", + "target": "2_/3", + "value": 3, + "average_conversion_time": 3 * ONE_MINUTE, + } + ], ) filter = filter.shallow_clone({"path_groupings": ["/xxx/invalid/*"]}) @@ -1835,55 +2272,87 @@ def test_event_exclusion_filters_with_wildcard_groups(self): self.assertEqual(len(response), 6) def test_event_inclusion_exclusion_filters_across_single_person(self): - # P1 for pageview event, screen event, and custom event all together _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), - _create_event( - properties={"$screen_name": "/screen1"}, - distinct_id="p1", - event="$screen", - team=self.team, - timestamp="2012-01-01 03:25:34", - ), - _create_event( - properties={"$screen_name": "/screen2"}, - distinct_id="p1", - event="$screen", - team=self.team, - timestamp="2012-01-01 03:26:34", - ), - _create_event( - properties={"$screen_name": "/screen3"}, - distinct_id="p1", - event="$screen", - team=self.team, - timestamp="2012-01-01 03:28:34", - ), - _create_event(distinct_id="p1", event="/custom1", team=self.team, timestamp="2012-01-01 03:29:34"), - _create_event(distinct_id="p1", 
event="/custom2", team=self.team, timestamp="2012-01-01 03:30:34"), - _create_event(distinct_id="p1", event="/custom3", team=self.team, timestamp="2012-01-01 03:32:34"), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) + ( + _create_event( + properties={"$screen_name": "/screen1"}, + distinct_id="p1", + event="$screen", + team=self.team, + timestamp="2012-01-01 03:25:34", + ), + ) + ( + _create_event( + properties={"$screen_name": "/screen2"}, + distinct_id="p1", + event="$screen", + team=self.team, + timestamp="2012-01-01 03:26:34", + ), + ) + ( + _create_event( + properties={"$screen_name": "/screen3"}, + distinct_id="p1", + event="$screen", + team=self.team, + timestamp="2012-01-01 03:28:34", + ), + ) + ( + _create_event( + distinct_id="p1", + event="/custom1", + team=self.team, + timestamp="2012-01-01 03:29:34", + ), + ) + ( + _create_event( + distinct_id="p1", + event="/custom2", + team=self.team, + timestamp="2012-01-01 03:30:34", + ), + ) + ( + _create_event( + distinct_id="p1", + event="/custom3", + team=self.team, + timestamp="2012-01-01 03:32:34", + ), + ) filter = PathFilter( team=self.team, data={"step_limit": 10, "date_from": "2012-01-01"} @@ -1893,14 +2362,54 @@ def test_event_inclusion_exclusion_filters_across_single_person(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/screen1", "value": 1, "average_conversion_time": 
ONE_MINUTE}, - {"source": "4_/screen1", "target": "5_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "5_/screen2", "target": "6_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "6_/screen3", "target": "7_/custom1", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "7_/custom1", "target": "8_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "8_/custom2", "target": "9_/custom3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/screen1", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "4_/screen1", + "target": "5_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "5_/screen2", + "target": "6_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "6_/screen3", + "target": "7_/custom1", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "7_/custom1", + "target": "8_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "8_/custom2", + "target": "9_/custom3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1910,28 +2419,86 @@ def test_event_inclusion_exclusion_filters_across_single_person(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/screen1", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "4_/screen1", "target": "5_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "5_/screen2", "target": 
"6_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/screen1", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "4_/screen1", + "target": "5_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "5_/screen2", + "target": "6_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) filter = filter.shallow_clone( - {"include_event_types": ["$pageview", "$screen"], "include_custom_events": ["/custom2"]} + { + "include_event_types": ["$pageview", "$screen"], + "include_custom_events": ["/custom2"], + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/screen1", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "4_/screen1", "target": "5_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "5_/screen2", "target": "6_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "6_/screen3", "target": "7_/custom2", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/screen1", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "4_/screen1", + "target": "5_/screen2", + "value": 1, + "average_conversion_time": 
ONE_MINUTE, + }, + { + "source": "5_/screen2", + "target": "6_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "6_/screen3", + "target": "7_/custom2", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -1947,9 +2514,24 @@ def test_event_inclusion_exclusion_filters_across_single_person(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, - {"source": "3_/3", "target": "4_/custom2", "value": 1, "average_conversion_time": 6 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, + { + "source": "3_/3", + "target": "4_/custom2", + "value": 1, + "average_conversion_time": 6 * ONE_MINUTE, + }, ], ) @@ -1958,48 +2540,60 @@ def test_event_inclusion_exclusion_filters_across_single_person(self): def test_respect_session_limits(self): _create_person(team_id=self.team.pk, distinct_ids=["fake"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-02 03:21:54", # new day, new session - ), - _create_event( - properties={"$current_url": "/2/"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-02 03:22:54", - ), - 
_create_event( - properties={"$current_url": "/3"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-02 03:26:54", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-02 03:21:54", # new day, new session + ), + ) + ( + _create_event( + properties={"$current_url": "/2/"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-02 03:22:54", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-02 03:26:54", + ), + ) filter = PathFilter(team=self.team, data={"date_from": "2012-01-01"}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -2007,73 +2601,99 @@ def test_respect_session_limits(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 2, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 2, "average_conversion_time": 3 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 2, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 2, + "average_conversion_time": 3 * ONE_MINUTE, + }, ], ) def test_removes_duplicates(self): _create_person(team_id=self.team.pk, distinct_ids=["fake"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake", - event="$pageview", - 
team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:54", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/2/"}, # trailing slash should be removed - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:54", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="fake", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:54", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:54", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2/"}, # trailing slash should be removed + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:54", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:54", + ), + ) _create_person(team_id=self.team.pk, distinct_ids=["fake2"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2/"}, - distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:23:34", - ), - _create_event( - properties={"$current_url": "/3"}, - 
distinct_id="fake2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:27:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2/"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:23:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="fake2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:27:34", + ), + ) filter = PathFilter(team=self.team, data={"date_from": "2012-01-01"}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -2081,8 +2701,18 @@ def test_removes_duplicates(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 2, "average_conversion_time": 1.5 * ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 2, "average_conversion_time": 3 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 2, + "average_conversion_time": 1.5 * ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 2, + "average_conversion_time": 3 * ONE_MINUTE, + }, ], ) @@ -2091,103 +2721,129 @@ def test_removes_duplicates(self): def test_start_and_end(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["person_1"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:01:00", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:02:00", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:03:00", - ), - _create_event( - properties={"$current_url": "/4"}, - distinct_id="person_1", - event="$pageview", - 
team=self.team, - timestamp="2021-05-01 00:04:00", - ), - _create_event( - properties={"$current_url": "/5"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:05:00", - ), - _create_event( - properties={"$current_url": "/about"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:06:00", - ), - _create_event( - properties={"$current_url": "/after"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:07:00", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:01:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:02:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:03:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/4"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:04:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/5"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:05:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:06:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/after"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:07:00", + ), + ) p2 = _create_person(team_id=self.team.pk, distinct_ids=["person_2"]) - _create_event( - properties={"$current_url": "/5"}, - distinct_id="person_2", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:01:00", - ), - _create_event( - properties={"$current_url": 
"/about"}, - distinct_id="person_2", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:02:00", - ), + ( + _create_event( + properties={"$current_url": "/5"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:01:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:02:00", + ), + ) _create_person(team_id=self.team.pk, distinct_ids=["person_3"]) - _create_event( - properties={"$current_url": "/3"}, - distinct_id="person_3", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:01:00", - ), - _create_event( - properties={"$current_url": "/4"}, - distinct_id="person_3", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:02:00", - ), - _create_event( - properties={"$current_url": "/about"}, - distinct_id="person_3", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:03:00", - ), - _create_event( - properties={"$current_url": "/after"}, - distinct_id="person_3", - event="$pageview", - team=self.team, - timestamp="2021-05-01 00:04:00", - ), + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:01:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/4"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:02:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:03:00", + ), + ) + ( + _create_event( + properties={"$current_url": "/after"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + timestamp="2021-05-01 00:04:00", + ), + ) filter = PathFilter( team=self.team, @@ -2201,7 +2857,15 @@ def test_start_and_end(self): ) response = Paths(team=self.team, filter=filter).run(team=self.team, 
filter=filter) self.assertEqual( - response, [{"source": "1_/5", "target": "2_/about", "value": 2, "average_conversion_time": 60000.0}] + response, + [ + { + "source": "1_/5", + "target": "2_/about", + "value": 2, + "average_conversion_time": 60000.0, + } + ], ) self.assertCountEqual(self._get_people_at_path(filter, "1_/5", "2_/about"), [p1.uuid, p2.uuid]) @@ -2211,10 +2875,30 @@ def test_start_and_end(self): self.assertEqual( response, [ - {"source": "1_/2", "target": "2_/3", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/3", "target": "3_...", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "3_...", "target": "4_/5", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "4_/5", "target": "5_/about", "value": 1, "average_conversion_time": ONE_MINUTE}, + { + "source": "1_/2", + "target": "2_/3", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/3", + "target": "3_...", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "3_...", + "target": "4_/5", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "4_/5", + "target": "5_/about", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, ], ) self.assertCountEqual(self._get_people_at_path(filter, "3_...", "4_/5"), [p1.uuid]) @@ -2234,11 +2918,19 @@ def should_query_list(filter) -> Tuple[bool, bool]: filter = PathFilter({"include_event_types": ["$screen"]}) self.assertEqual(should_query_list(filter), (False, True)) - filter = filter.shallow_clone({"include_event_types": [], "include_custom_events": ["/custom1", "/custom2"]}) + filter = filter.shallow_clone( + { + "include_event_types": [], + "include_custom_events": ["/custom1", "/custom2"], + } + ) self.assertEqual(should_query_list(filter), (False, False)) filter = filter.shallow_clone( - {"include_event_types": ["$pageview", "$screen", "custom_event"], "include_custom_events": []} + { + "include_event_types": ["$pageview", "$screen", 
"custom_event"], + "include_custom_events": [], + } ) self.assertEqual(should_query_list(filter), (True, True)) @@ -2252,7 +2944,11 @@ def should_query_list(filter) -> Tuple[bool, bool]: self.assertEqual(should_query_list(filter), (True, True)) filter = filter.shallow_clone( - {"include_event_types": [], "include_custom_events": [], "exclude_events": ["$pageview"]} + { + "include_event_types": [], + "include_custom_events": [], + "exclude_events": ["$pageview"], + } ) self.assertEqual(should_query_list(filter), (False, True)) @@ -2262,77 +2958,95 @@ def test_wildcard_groups_across_people(self): # P1 for pageview event /2/bar/1/foo _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2/bar/1/foo"}, # regex matches, despite beginning with `/2/` - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2/bar/1/foo"}, # regex matches, despite beginning with `/2/` + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) # P2 for pageview event /bar/2/foo _create_person(team_id=self.team.pk, distinct_ids=["p2"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - 
_create_event( - properties={"$current_url": "/bar/2/foo"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/bar/2/foo"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) # P3 for pageview event /bar/3/foo _create_person(team_id=self.team.pk, distinct_ids=["p3"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p3", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/bar/33/foo"}, - distinct_id="p3", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="p3", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p3", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/bar/33/foo"}, + distinct_id="p3", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="p3", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) filter = PathFilter( team=self.team, @@ -2348,8 +3062,18 @@ def test_wildcard_groups_across_people(self): self.assertEqual( response, [ - 
{"source": "1_/1", "target": "2_/bar/*/foo", "value": 3, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/bar/*/foo", "target": "3_/3", "value": 3, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/bar/*/foo", + "value": 3, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/bar/*/foo", + "target": "3_/3", + "value": 3, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -2360,73 +3084,102 @@ def test_wildcard_groups_evil_input(self): # P1 for pageview event /2/bar/1/foo _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": evil_string}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2/bar/aaa"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - properties={"$current_url": "/3"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), + ( + _create_event( + properties={"$current_url": evil_string}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2/bar/aaa"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) # P2 for pageview event /2/bar/2/foo _create_person(team_id=self.team.pk, distinct_ids=["p2"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:21:34", - ), - _create_event( - properties={"$current_url": "/2/3?q=1"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:22:34", - ), - _create_event( - 
properties={"$current_url": "/3?q=1"}, - distinct_id="p2", - event="$pageview", - team=self.team, - timestamp="2012-01-01 03:24:34", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:21:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/2/3?q=1"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:22:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/3?q=1"}, + distinct_id="p2", + event="$pageview", + team=self.team, + timestamp="2012-01-01 03:24:34", + ), + ) filter = PathFilter( team=self.team, data={ "date_from": "2012-01-01", "include_event_types": ["$pageview"], - "path_groupings": ["(a+)+", "[aaa|aaaa]+", "1.*", ".*", "/3?q=1", "/3*"], + "path_groupings": [ + "(a+)+", + "[aaa|aaaa]+", + "1.*", + ".*", + "/3?q=1", + "/3*", + ], }, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/3*", "value": 1, "average_conversion_time": 3 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/3*", + "value": 1, + "average_conversion_time": 3 * ONE_MINUTE, + }, { "source": f"1_{evil_string}", "target": "2_/2/bar/aaa", "value": 1, "average_conversion_time": ONE_MINUTE, }, - {"source": "2_/2/bar/aaa", "target": "3_/3*", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "2_/2/bar/aaa", + "target": "3_/3*", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -2686,12 +3439,37 @@ def test_start_dropping_orphaned_edges(self): self.assertEqual( response, [ - {"source": "1_/2", "target": "2_/3", "value": 5, "average_conversion_time": 60000.0}, - {"source": "2_/3", "target": "3_/4", "value": 5, "average_conversion_time": 60000.0}, - {"source": "3_/4", "target": "4_/5", "value": 5, "average_conversion_time": 60000.0}, - {"source": "4_/5", "target": "5_/about", "value": 5, 
"average_conversion_time": 60000.0}, + { + "source": "1_/2", + "target": "2_/3", + "value": 5, + "average_conversion_time": 60000.0, + }, + { + "source": "2_/3", + "target": "3_/4", + "value": 5, + "average_conversion_time": 60000.0, + }, + { + "source": "3_/4", + "target": "4_/5", + "value": 5, + "average_conversion_time": 60000.0, + }, + { + "source": "4_/5", + "target": "5_/about", + "value": 5, + "average_conversion_time": 60000.0, + }, # {'source': '3_/x', 'target': '4_/about', 'value': 2, 'average_conversion_time': 60000.0}, # gets deleted by validation since dangling - {"source": "1_/2", "target": "2_/a", "value": 1, "average_conversion_time": 30000.0}, + { + "source": "1_/2", + "target": "2_/a", + "value": 1, + "average_conversion_time": 30000.0, + }, ], ) @@ -2869,7 +3647,14 @@ def test_groups_filtering(self): "date_from": "2012-01-01", "date_to": "2012-02-01", "include_event_types": ["$pageview", "$screen", "custom_event"], - "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], }, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -2877,34 +3662,82 @@ def test_groups_filtering(self): self.assertEqual( response, [ - {"source": "1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) filter = filter.shallow_clone( - {"properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}]} + { + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } + 
] + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/screen1", "target": "2_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/screen2", "target": "3_/screen3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/screen1", + "target": "2_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/screen2", + "target": "3_/screen3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) filter = filter.shallow_clone( - {"properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 1}]} + { + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 1, + } + ] + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/custom1", "target": "2_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/custom2", "target": "3_/custom3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/custom1", + "target": "2_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/custom2", + "target": "3_/custom3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -3003,7 +3836,14 @@ def test_groups_filtering_person_on_events(self): "date_from": "2012-01-01", "date_to": "2012-02-01", "include_event_types": ["$pageview", "$screen", "custom_event"], - "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], }, ) with override_instance_config("PERSON_ON_EVENTS_ENABLED", True): @@ -3012,20 +3852,44 @@ def test_groups_filtering_person_on_events(self): self.assertEqual( response, [ - {"source": 
"1_/1", "target": "2_/2", "value": 1, "average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) filter = filter.shallow_clone( - {"properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}]} + { + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } + ] + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/screen1", "target": "2_/screen2", "value": 1, "average_conversion_time": ONE_MINUTE}, + { + "source": "1_/screen1", + "target": "2_/screen2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, { "source": "2_/screen2", "target": "3_/screen3", @@ -3036,14 +3900,28 @@ def test_groups_filtering_person_on_events(self): ) filter = filter.shallow_clone( - {"properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 1}]} + { + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 1, + } + ] + } ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual( response, [ - {"source": "1_/custom1", "target": "2_/custom2", "value": 1, "average_conversion_time": ONE_MINUTE}, + { + "source": "1_/custom1", + "target": "2_/custom2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, { "source": "2_/custom2", "target": "3_/custom3", @@ -3137,8 +4015,18 @@ def test_person_on_events_v2(self): [ # we expect 1s for the "value"s because the two persons above are actually the same person # due to the override - {"source": "1_/1", "target": "2_/2", "value": 1, 
"average_conversion_time": ONE_MINUTE}, - {"source": "2_/2", "target": "3_/3", "value": 1, "average_conversion_time": 2 * ONE_MINUTE}, + { + "source": "1_/1", + "target": "2_/2", + "value": 1, + "average_conversion_time": ONE_MINUTE, + }, + { + "source": "2_/2", + "target": "3_/3", + "value": 1, + "average_conversion_time": 2 * ONE_MINUTE, + }, ], ) @@ -3149,7 +4037,11 @@ def test_recording(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"]) events = [ _create_event( - properties={"$current_url": "/1", "$session_id": "s1", "$window_id": "w1"}, + properties={ + "$current_url": "/1", + "$session_id": "s1", + "$window_id": "w1", + }, distinct_id="p1", event="$pageview", team=self.team, @@ -3157,7 +4049,11 @@ def test_recording(self): event_uuid="11111111-1111-1111-1111-111111111111", ), _create_event( - properties={"$current_url": "/2", "$session_id": "s1", "$window_id": "w1"}, + properties={ + "$current_url": "/2", + "$session_id": "s1", + "$window_id": "w1", + }, distinct_id="p1", event="$pageview", team=self.team, @@ -3165,7 +4061,11 @@ def test_recording(self): event_uuid="21111111-1111-1111-1111-111111111111", ), _create_event( - properties={"$current_url": "/1", "$session_id": "s2", "$window_id": "w2"}, + properties={ + "$current_url": "/1", + "$session_id": "s2", + "$window_id": "w2", + }, distinct_id="p1", event="$pageview", team=self.team, @@ -3173,7 +4073,11 @@ def test_recording(self): event_uuid="31111111-1111-1111-1111-111111111111", ), _create_event( - properties={"$current_url": "/2", "$session_id": "s3", "$window_id": "w3"}, + properties={ + "$current_url": "/2", + "$session_id": "s3", + "$window_id": "w3", + }, distinct_id="p1", event="$pageview", team=self.team, @@ -3202,7 +4106,11 @@ def test_recording(self): p2 = _create_person(team_id=self.team.pk, distinct_ids=["p2"]) events += [ _create_event( - properties={"$current_url": "/1", "$session_id": "s5", "$window_id": "w1"}, + properties={ + "$current_url": "/1", + "$session_id": 
"s5", + "$window_id": "w1", + }, distinct_id="p2", event="$pageview", team=self.team, @@ -3210,7 +4118,11 @@ def test_recording(self): event_uuid="51111111-1111-1111-1111-111111111111", ), _create_event( - properties={"$current_url": "/2", "$session_id": "s5", "$window_id": "w1"}, + properties={ + "$current_url": "/2", + "$session_id": "s5", + "$window_id": "w1", + }, distinct_id="p2", event="$pageview", team=self.team, @@ -3265,22 +4177,26 @@ def test_recording(self): def test_recording_with_no_window_or_session_id(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": "/1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="11111111-1111-1111-1111-111111111111", - ), - _create_event( - properties={"$current_url": "/2"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="21111111-1111-1111-1111-111111111111", - ), + ( + _create_event( + properties={"$current_url": "/1"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="11111111-1111-1111-1111-111111111111", + ), + ) + ( + _create_event( + properties={"$current_url": "/2"}, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="21111111-1111-1111-1111-111111111111", + ), + ) filter = PathFilter( team=self.team, @@ -3301,30 +4217,48 @@ def test_recording_with_no_window_or_session_id(self): def test_recording_with_start_and_end(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": "/1", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - 
timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="11111111-1111-1111-1111-111111111111", - ), - _create_event( - properties={"$current_url": "/2", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="21111111-1111-1111-1111-111111111111", - ), - _create_event( - properties={"$current_url": "/3", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=(timezone.now() + timedelta(minutes=2)).strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="31111111-1111-1111-1111-111111111111", - ), + ( + _create_event( + properties={ + "$current_url": "/1", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="11111111-1111-1111-1111-111111111111", + ), + ) + ( + _create_event( + properties={ + "$current_url": "/2", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="21111111-1111-1111-1111-111111111111", + ), + ) + ( + _create_event( + properties={ + "$current_url": "/3", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=(timezone.now() + timedelta(minutes=2)).strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="31111111-1111-1111-1111-111111111111", + ), + ) timestamp = timezone.now() produce_replay_summary( @@ -3372,30 +4306,48 @@ def test_recording_with_start_and_end(self): def test_recording_for_dropoff(self): p1 = _create_person(team_id=self.team.pk, distinct_ids=["p1"]) - _create_event( - properties={"$current_url": "/1", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - 
team=self.team, - timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="11111111-1111-1111-1111-111111111111", - ), - _create_event( - properties={"$current_url": "/2", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="21111111-1111-1111-1111-111111111111", - ), - _create_event( - properties={"$current_url": "/3", "$session_id": "s1", "$window_id": "w1"}, - distinct_id="p1", - event="$pageview", - team=self.team, - timestamp=(timezone.now() + timedelta(minutes=2)).strftime("%Y-%m-%d %H:%M:%S.%f"), - event_uuid="31111111-1111-1111-1111-111111111111", - ), + ( + _create_event( + properties={ + "$current_url": "/1", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="11111111-1111-1111-1111-111111111111", + ), + ) + ( + _create_event( + properties={ + "$current_url": "/2", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=(timezone.now() + timedelta(minutes=1)).strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="21111111-1111-1111-1111-111111111111", + ), + ) + ( + _create_event( + properties={ + "$current_url": "/3", + "$session_id": "s1", + "$window_id": "w1", + }, + distinct_id="p1", + event="$pageview", + team=self.team, + timestamp=(timezone.now() + timedelta(minutes=2)).strftime("%Y-%m-%d %H:%M:%S.%f"), + event_uuid="31111111-1111-1111-1111-111111111111", + ), + ) timestamp = timezone.now() produce_replay_summary( @@ -3501,7 +4453,12 @@ def test_wildcard_groups_with_sampling(self): "value": 10, "average_conversion_time": 160000, }, - {"source": "3_step two", "target": "4_step three", "value": 5, "average_conversion_time": ONE_MINUTE}, + { + "source": "3_step two", + "target": "4_step three", + 
"value": 5, + "average_conversion_time": ONE_MINUTE, + }, ], ) diff --git a/ee/clickhouse/queries/test/test_person_query.py b/ee/clickhouse/queries/test/test_person_query.py index 793a51827af79..bd2a280d40f03 100644 --- a/ee/clickhouse/queries/test/test_person_query.py +++ b/ee/clickhouse/queries/test/test_person_query.py @@ -40,7 +40,11 @@ def testdata(db, team): _create_person( distinct_ids=["3"], team_id=team.pk, - properties={"email": "karl@example.com", "$os": "windows", "$browser": "mozilla"}, + properties={ + "email": "karl@example.com", + "$os": "windows", + "$browser": "mozilla", + }, ) @@ -54,7 +58,12 @@ def test_person_query(testdata, team, snapshot): data={ "properties": [ {"key": "event_prop", "value": "value"}, - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, ] } ) @@ -67,7 +76,11 @@ def test_person_query_with_multiple_cohorts(testdata, team, snapshot): filter = Filter(data={"properties": []}) for i in range(10): - _create_person(team_id=team.pk, distinct_ids=[f"person{i}"], properties={"group": i, "email": f"{i}@hey.com"}) + _create_person( + team_id=team.pk, + distinct_ids=[f"person{i}"], + properties={"group": i, "email": f"{i}@hey.com"}, + ) cohort1 = Cohort.objects.create( team=team, @@ -97,7 +110,11 @@ def test_person_query_with_multiple_cohorts(testdata, team, snapshot): { "type": "OR", "values": [ - {"key": "group", "value": [1, 2, 3, 4, 5, 6], "type": "person"}, + { + "key": "group", + "value": [1, 2, 3, 4, 5, 6], + "type": "person", + }, ], } ], @@ -117,7 +134,12 @@ def test_person_query_with_multiple_cohorts(testdata, team, snapshot): filter = Filter( data={ "properties": [ - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, ] } ) @@ -125,7 +147,12 @@ def 
test_person_query_with_multiple_cohorts(testdata, team, snapshot): filter2 = Filter( data={ "properties": [ - {"key": "email", "type": "person", "value": "hey", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "hey", + "operator": "icontains", + }, ] } ) @@ -134,7 +161,10 @@ def test_person_query_with_multiple_cohorts(testdata, team, snapshot): # 3 rows because the intersection between cohorts 1 and 2 is person1, person2, and person3, # with their respective group properties - assert run_query(team, filter2, cohort_filters=cohort_filters) == {"rows": 3, "columns": 1} + assert run_query(team, filter2, cohort_filters=cohort_filters) == { + "rows": 3, + "columns": 1, + } assert person_query(team, filter2, cohort_filters=cohort_filters) == snapshot @@ -145,9 +175,24 @@ def test_person_query_with_anded_property_groups(testdata, team, snapshot): "type": "AND", "values": [ {"key": "event_prop", "value": "value"}, - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, - {"key": "$os", "type": "person", "value": "windows", "operator": "exact"}, - {"key": "$browser", "type": "person", "value": "chrome", "operator": "exact"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, + { + "key": "$os", + "type": "person", + "value": "windows", + "operator": "exact", + }, + { + "key": "$browser", + "type": "person", + "value": "chrome", + "operator": "exact", + }, ], } } @@ -166,8 +211,18 @@ def test_person_query_with_and_and_or_property_groups(testdata, team, snapshot): { "type": "OR", "values": [ - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, - {"key": "$browser", "type": "person", "value": "karl", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, + { + "key": "$browser", + "type": "person", + "value": "karl", + "operator": "icontains", + }, ], }, { @@ -195,7 +250,14 @@ def 
test_person_query_with_and_and_or_property_groups(testdata, team, snapshot): def test_person_query_with_extra_requested_fields(testdata, team, snapshot): filter = Filter( data={ - "properties": [{"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}], + "properties": [ + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + } + ], "breakdown": "person_prop_4326", "breakdown_type": "person", } @@ -215,7 +277,14 @@ def test_person_query_with_entity_filters(testdata, team, snapshot): "events": [ { "id": "$pageview", - "properties": [{"key": "email", "type": "person", "value": "karl", "operator": "icontains"}], + "properties": [ + { + "key": "email", + "type": "person", + "value": "karl", + "operator": "icontains", + } + ], } ] } @@ -225,16 +294,31 @@ def test_person_query_with_entity_filters(testdata, team, snapshot): assert run_query(team, filter) == {"rows": 3, "columns": 2} assert person_query(team, filter, entity=filter.entities[0]) == snapshot - assert run_query(team, filter, entity=filter.entities[0]) == {"rows": 1, "columns": 1} + assert run_query(team, filter, entity=filter.entities[0]) == { + "rows": 1, + "columns": 1, + } def test_person_query_with_extra_fields(testdata, team, snapshot): filter = Filter( - data={"properties": [{"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}]} + data={ + "properties": [ + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + } + ] + } ) assert person_query(team, filter, extra_fields=["person_props", "pmat_email"]) == snapshot - assert run_query(team, filter, extra_fields=["person_props", "pmat_email"]) == {"rows": 2, "columns": 3} + assert run_query(team, filter, extra_fields=["person_props", "pmat_email"]) == { + "rows": 2, + "columns": 3, + } def test_person_query_with_entity_filters_and_property_group_filters(testdata, team, snapshot): @@ -246,8 +330,18 @@ def 
test_person_query_with_entity_filters_and_property_group_filters(testdata, t "properties": { "type": "OR", "values": [ - {"key": "email", "type": "person", "value": "marius", "operator": "icontains"}, - {"key": "$os", "type": "person", "value": "windows", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "marius", + "operator": "icontains", + }, + { + "key": "$os", + "type": "person", + "value": "windows", + "operator": "icontains", + }, ], }, } @@ -258,15 +352,30 @@ def test_person_query_with_entity_filters_and_property_group_filters(testdata, t { "type": "OR", "values": [ - {"key": "email", "type": "person", "value": "posthog", "operator": "icontains"}, - {"key": "$browser", "type": "person", "value": "karl", "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": "posthog", + "operator": "icontains", + }, + { + "key": "$browser", + "type": "person", + "value": "karl", + "operator": "icontains", + }, ], }, { "type": "OR", "values": [ {"key": "event_prop", "value": "value"}, - {"key": "$os", "type": "person", "value": "windows", "operator": "exact"}, + { + "key": "$os", + "type": "person", + "value": "windows", + "operator": "exact", + }, ], }, ], @@ -278,7 +387,10 @@ def test_person_query_with_entity_filters_and_property_group_filters(testdata, t assert run_query(team, filter) == {"rows": 2, "columns": 3} assert person_query(team, filter, entity=filter.entities[0]) == snapshot - assert run_query(team, filter, entity=filter.entities[0]) == {"rows": 2, "columns": 2} + assert run_query(team, filter, entity=filter.entities[0]) == { + "rows": 2, + "columns": 2, + } def test_person_query_with_updated_after(testdata, team, snapshot): diff --git a/ee/clickhouse/queries/test/test_property_optimizer.py b/ee/clickhouse/queries/test/test_property_optimizer.py index f6f40881a01e9..907c035b64b39 100644 --- a/ee/clickhouse/queries/test/test_property_optimizer.py +++ b/ee/clickhouse/queries/test/test_property_optimizer.py @@ 
-8,7 +8,13 @@ {"key": "person_prop", "value": "efg", "type": "person"}, {"key": "id", "value": 1, "type": "cohort"}, {"key": "tag_name", "value": ["label"], "operator": "exact", "type": "element"}, - {"key": "group_prop", "value": ["value"], "operator": "exact", "type": "group", "group_type_index": 2}, + { + "key": "group_prop", + "value": ["value"], + "operator": "exact", + "type": "group", + "group_type_index": 2, + }, ] BASE_FILTER = Filter({"events": [{"id": "$pageview", "type": "events", "order": 0}]}) @@ -18,7 +24,6 @@ class TestPersonPropertySelector(unittest.TestCase): def test_basic_selector(self): - filter = BASE_FILTER.shallow_clone( { "properties": { @@ -33,7 +38,6 @@ def test_basic_selector(self): self.assertTrue(PropertyOptimizer.using_only_person_properties(filter.property_groups)) def test_multilevel_selector(self): - filter = BASE_FILTER.shallow_clone( { "properties": { @@ -42,15 +46,31 @@ def test_multilevel_selector(self): { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", "bar2"], "type": "event"}, - {"key": "person_prop2", "value": "efg2", "type": "person"}, + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "event", + }, + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + }, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, - {"key": "person_prop", "value": "efg", "type": "person"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + }, + { + "key": "person_prop", + "value": "efg", + "type": "person", + }, ], }, ], @@ -61,7 +81,6 @@ def test_multilevel_selector(self): self.assertFalse(PropertyOptimizer.using_only_person_properties(filter.property_groups)) def test_multilevel_selector_with_valid_OR_persons(self): - filter = BASE_FILTER.shallow_clone( { "properties": { @@ -70,15 +89,31 @@ def test_multilevel_selector_with_valid_OR_persons(self): { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", 
"bar2"], "type": "person"}, - {"key": "person_prop2", "value": "efg2", "type": "person"}, + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "person", + }, + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + }, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "person"}, - {"key": "person_prop", "value": "efg", "type": "person"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "person", + }, + { + "key": "person_prop", + "value": "efg", + "type": "person", + }, ], }, ], @@ -90,7 +125,6 @@ def test_multilevel_selector_with_valid_OR_persons(self): class TestPersonPushdown(unittest.TestCase): - maxDiff = None def test_basic_pushdowns(self): @@ -102,7 +136,11 @@ def test_basic_pushdowns(self): assert outer is not None self.assertEqual( - inner.to_dict(), {"type": "AND", "values": [{"key": "person_prop", "value": "efg", "type": "person"}]} + inner.to_dict(), + { + "type": "AND", + "values": [{"key": "person_prop", "value": "efg", "type": "person"}], + }, ) self.assertEqual( @@ -112,7 +150,12 @@ def test_basic_pushdowns(self): "values": [ {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, {"key": "id", "value": 1, "type": "cohort"}, - {"key": "tag_name", "value": ["label"], "operator": "exact", "type": "element"}, + { + "key": "tag_name", + "value": ["label"], + "operator": "exact", + "type": "element", + }, { "key": "group_prop", "value": ["value"], @@ -133,15 +176,31 @@ def test_person_properties_mixed_with_event_properties(self): { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", "bar2"], "type": "event"}, - {"key": "person_prop2", "value": "efg2", "type": "person"}, + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "event", + }, + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + }, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, - {"key": "person_prop", 
"value": "efg", "type": "person"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + }, + { + "key": "person_prop", + "value": "efg", + "type": "person", + }, ], }, ], @@ -160,7 +219,12 @@ def test_person_properties_mixed_with_event_properties(self): inner.to_dict(), { "type": "AND", - "values": [{"type": "AND", "values": [{"key": "person_prop", "value": "efg", "type": "person"}]}], + "values": [ + { + "type": "AND", + "values": [{"key": "person_prop", "value": "efg", "type": "person"}], + } + ], }, ) @@ -172,14 +236,22 @@ def test_person_properties_mixed_with_event_properties(self): { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", "bar2"], "type": "event"}, + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "event", + }, {"key": "person_prop2", "value": "efg2", "type": "person"}, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + }, # {"key": "person_prop", "value": "efg", "type": "person", }, # this was pushed down ], }, @@ -196,15 +268,31 @@ def test_person_properties_with_or_not_mixed_with_event_properties(self): { "type": "OR", "values": [ - {"key": "person_prop2", "value": ["foo2", "bar2"], "type": "person"}, - {"key": "person_prop2", "value": "efg2", "type": "person"}, + { + "key": "person_prop2", + "value": ["foo2", "bar2"], + "type": "person", + }, + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + }, ], }, { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, - {"key": "person_prop", "value": "efg", "type": "person"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + }, + { + "key": "person_prop", + "value": "efg", + "type": "person", + }, ], }, ], @@ -227,11 +315,18 @@ def test_person_properties_with_or_not_mixed_with_event_properties(self): { "type": "OR", "values": [ - {"key": 
"person_prop2", "value": ["foo2", "bar2"], "type": "person"}, + { + "key": "person_prop2", + "value": ["foo2", "bar2"], + "type": "person", + }, {"key": "person_prop2", "value": "efg2", "type": "person"}, ], }, - {"type": "AND", "values": [{"key": "person_prop", "value": "efg", "type": "person"}]}, + { + "type": "AND", + "values": [{"key": "person_prop", "value": "efg", "type": "person"}], + }, ], }, ) @@ -245,7 +340,11 @@ def test_person_properties_with_or_not_mixed_with_event_properties(self): { "type": "AND", "values": [ - {"key": "event_prop", "value": ["foo", "bar"], "type": "event"}, + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + }, # {"key": "person_prop", "value": "efg", "type": "person", }, # this was pushed down ], } @@ -282,7 +381,13 @@ def test_person_properties_mixed_with_event_properties_with_misdirection_using_n }, { "type": "AND", - "values": [{"key": "person_prop2", "value": "efg2", "type": "person"}], + "values": [ + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + } + ], }, ], } @@ -296,7 +401,13 @@ def test_person_properties_mixed_with_event_properties_with_misdirection_using_n "values": [ { "type": "AND", - "values": [{"key": "event_prop", "value": ["foo", "bar"], "type": "event"}], + "values": [ + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + } + ], } ], }, @@ -309,7 +420,11 @@ def test_person_properties_mixed_with_event_properties_with_misdirection_using_n { "type": "OR", "values": [ - {"key": "person_prop", "value": "efg", "type": "person"} + { + "key": "person_prop", + "value": "efg", + "type": "person", + } ], } ], @@ -346,7 +461,13 @@ def test_person_properties_mixed_with_event_properties_with_misdirection_using_n "values": [ { "type": "OR", - "values": [{"key": "person_prop", "value": "efg", "type": "person"}], + "values": [ + { + "key": "person_prop", + "value": "efg", + "type": "person", + } + ], } ], } @@ -375,14 +496,24 @@ def 
test_person_properties_mixed_with_event_properties_with_misdirection_using_n { "type": "OR", "values": [ - {"key": "event_prop2", "value": ["foo2", "bar2"], "type": "event"} + { + "key": "event_prop2", + "value": ["foo2", "bar2"], + "type": "event", + } ], } ], }, { "type": "AND", - "values": [{"key": "person_prop2", "value": "efg2", "type": "person"}], + "values": [ + { + "key": "person_prop2", + "value": "efg2", + "type": "person", + } + ], }, ], } @@ -396,7 +527,13 @@ def test_person_properties_mixed_with_event_properties_with_misdirection_using_n "values": [ { "type": "AND", - "values": [{"key": "event_prop", "value": ["foo", "bar"], "type": "event"}], + "values": [ + { + "key": "event_prop", + "value": ["foo", "bar"], + "type": "event", + } + ], } ], }, diff --git a/ee/clickhouse/queries/test/test_retention.py b/ee/clickhouse/queries/test/test_retention.py index 63d15ca76ec57..91354c4e8caf6 100644 --- a/ee/clickhouse/queries/test/test_retention.py +++ b/ee/clickhouse/queries/test/test_retention.py @@ -8,7 +8,12 @@ from posthog.models.instance_setting import override_instance_config from posthog.models.person import Person from posthog.queries.retention import Retention -from posthog.queries.test.test_retention import _create_event, _create_events, _date, pluck +from posthog.queries.test.test_retention import ( + _create_event, + _create_events, + _date, + pluck, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -23,11 +28,31 @@ def _create_groups_and_events(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + 
group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, + ) Person.objects.create(team=self.team, distinct_ids=["person1", "alias1"]) Person.objects.create(team=self.team, distinct_ids=["person2"]) @@ -44,11 +69,19 @@ def _create_groups_and_events(self): ("person1", _date(7), {"$group_0": "org:5"}), ("person2", _date(7), {"$group_0": "org:6"}), ("person1", _date(14), {"$group_0": "org:5"}), - ("person1", _date(month=1, day=-6), {"$group_0": "org:5", "$group_1": "company:1"}), + ( + "person1", + _date(month=1, day=-6), + {"$group_0": "org:5", "$group_1": "company:1"}, + ), ("person2", _date(month=1, day=-6), {"$group_0": "org:6"}), ("person2", _date(month=1, day=1), {"$group_0": "org:6"}), ("person1", _date(month=1, day=1), {"$group_0": "org:5"}), - ("person2", _date(month=1, day=15), {"$group_0": "org:6", "$group_1": "company:1"}), + ( + "person2", + _date(month=1, day=15), + {"$group_0": "org:6", "$group_1": "company:1"}, + ), ], ) @@ -62,7 +95,14 @@ def test_groups_filtering(self): "date_to": _date(10, month=1, hour=0), "period": "Week", "total_intervals": 7, - "properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}], + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } + ], }, team=self.team, ), @@ -71,7 +111,15 @@ def test_groups_filtering(self): self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 0, 1, 1, 0, 1], [1, 0, 1, 
1, 0, 1], [0, 0, 0, 0, 0], [1, 1, 0, 1], [1, 0, 1], [0, 0], [1]], + [ + [1, 1, 0, 1, 1, 0, 1], + [1, 0, 1, 1, 0, 1], + [0, 0, 0, 0, 0], + [1, 1, 0, 1], + [1, 0, 1], + [0, 0], + [1], + ], ) result = Retention().run( @@ -81,7 +129,13 @@ def test_groups_filtering(self): "period": "Week", "total_intervals": 7, "properties": [ - {"key": "industry", "value": "", "type": "group", "group_type_index": 0, "operator": "is_set"} + { + "key": "industry", + "value": "", + "type": "group", + "group_type_index": 0, + "operator": "is_set", + } ], }, team=self.team, @@ -91,7 +145,15 @@ def test_groups_filtering(self): self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) # TODO: Delete this test when moved to person-on-events @@ -111,7 +173,15 @@ def test_groups_aggregating(self): result = Retention().run(filter, self.team) self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) actor_result, _ = Retention().actors_in_period(filter.shallow_clone({"selected_interval": 0}), self.team) @@ -130,7 +200,15 @@ def test_groups_aggregating(self): result = Retention().run(filter, self.team) self.assertEqual( pluck(result, "values", "count"), - [[1, 0, 0, 1, 0, 0, 1], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [1, 0, 0, 1], [0, 0, 0], [0, 0], [1]], + [ + [1, 0, 0, 1, 0, 0, 1], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [1, 0, 0, 1], + [0, 0, 0], + [0, 0], + [1], + ], ) # TODO: Delete this test when moved to person-on-events @@ -170,7 +248,12 @@ def test_groups_filtering_person_on_events(self): "period": "Week", "total_intervals": 7, "properties": [ - {"key": 
"industry", "value": "technology", "type": "group", "group_type_index": 0} + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } ], }, team=self.team, @@ -180,7 +263,15 @@ def test_groups_filtering_person_on_events(self): self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 0, 1, 1, 0, 1], [1, 0, 1, 1, 0, 1], [0, 0, 0, 0, 0], [1, 1, 0, 1], [1, 0, 1], [0, 0], [1]], + [ + [1, 1, 0, 1, 1, 0, 1], + [1, 0, 1, 1, 0, 1], + [0, 0, 0, 0, 0], + [1, 1, 0, 1], + [1, 0, 1], + [0, 0], + [1], + ], ) result = Retention().run( @@ -206,7 +297,15 @@ def test_groups_filtering_person_on_events(self): self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) @override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=True) @@ -340,7 +439,15 @@ def test_groups_filtering_person_on_events_v2(self): # We expect 1s across the board due to the override set up from person1 to person2, making them the same person self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1], [1, 1, 1, 1], [1, 1, 1], [1, 1], [1]], + [ + [1, 1, 1, 1, 1, 1, 1], + [1, 1, 1, 1, 1, 1], + [1, 1, 1, 1, 1], + [1, 1, 1, 1], + [1, 1, 1], + [1, 1], + [1], + ], ) @also_test_with_materialized_columns(group_properties=[(0, "industry")]) @@ -359,11 +466,18 @@ def test_groups_aggregating_person_on_events(self): ) with override_instance_config("PERSON_ON_EVENTS_ENABLED", True): - result = Retention().run(filter, self.team) self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) actor_result, _ = 
Retention().actors_in_period(filter.shallow_clone({"selected_interval": 0}), self.team) @@ -383,7 +497,15 @@ def test_groups_aggregating_person_on_events(self): result = Retention().run(filter, self.team) self.assertEqual( pluck(result, "values", "count"), - [[1, 0, 0, 1, 0, 0, 1], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [1, 0, 0, 1], [0, 0, 0], [0, 0], [1]], + [ + [1, 0, 0, 1, 0, 0, 1], + [0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0], + [1, 0, 0, 1], + [0, 0, 0], + [0, 0], + [1], + ], ) @also_test_with_materialized_columns(group_properties=[(0, "industry")]) diff --git a/ee/clickhouse/queries/test/test_util.py b/ee/clickhouse/queries/test/test_util.py index ff102765255d3..02a3719f05752 100644 --- a/ee/clickhouse/queries/test/test_util.py +++ b/ee/clickhouse/queries/test/test_util.py @@ -15,15 +15,40 @@ def test_get_earliest_timestamp(db, team): with freeze_time("2021-01-21") as frozen_time: - _create_event(team=team, event="sign up", distinct_id="1", timestamp="2020-01-04T14:10:00Z") - _create_event(team=team, event="sign up", distinct_id="1", timestamp="2020-01-06T14:10:00Z") + _create_event( + team=team, + event="sign up", + distinct_id="1", + timestamp="2020-01-04T14:10:00Z", + ) + _create_event( + team=team, + event="sign up", + distinct_id="1", + timestamp="2020-01-06T14:10:00Z", + ) assert get_earliest_timestamp(team.id) == datetime(2020, 1, 4, 14, 10, tzinfo=ZoneInfo("UTC")) frozen_time.tick(timedelta(seconds=1)) - _create_event(team=team, event="sign up", distinct_id="1", timestamp="1984-01-06T14:10:00Z") - _create_event(team=team, event="sign up", distinct_id="1", timestamp="2014-01-01T01:00:00Z") - _create_event(team=team, event="sign up", distinct_id="1", timestamp="2015-01-01T01:00:00Z") + _create_event( + team=team, + event="sign up", + distinct_id="1", + timestamp="1984-01-06T14:10:00Z", + ) + _create_event( + team=team, + event="sign up", + distinct_id="1", + timestamp="2014-01-01T01:00:00Z", + ) + _create_event( + team=team, + event="sign up", + 
distinct_id="1", + timestamp="2015-01-01T01:00:00Z", + ) assert get_earliest_timestamp(team.id) == datetime(2015, 1, 1, 1, tzinfo=ZoneInfo("UTC")) diff --git a/ee/clickhouse/test/test_error.py b/ee/clickhouse/test/test_error.py index ec360196ad35e..18f0f4a859631 100644 --- a/ee/clickhouse/test/test_error.py +++ b/ee/clickhouse/test/test_error.py @@ -14,8 +14,18 @@ "Estimated query execution time (34.5 seconds) is too long.", None, ), - (ServerException("Syntax error", code=62), "CHQueryErrorSyntaxError", "Code: 62.\nSyntax error", 62), - (ServerException("Syntax error", code=9999), "CHQueryErrorUnknownException", "Code: 9999.\nSyntax error", 9999), + ( + ServerException("Syntax error", code=62), + "CHQueryErrorSyntaxError", + "Code: 62.\nSyntax error", + 62, + ), + ( + ServerException("Syntax error", code=9999), + "CHQueryErrorUnknownException", + "Code: 9999.\nSyntax error", + 9999, + ), ], ) def test_wrap_query_error(error, expected_type, expected_message, expected_code): diff --git a/ee/clickhouse/views/experiments.py b/ee/clickhouse/views/experiments.py index b282ad3b83b34..c5d15fa9797ba 100644 --- a/ee/clickhouse/views/experiments.py +++ b/ee/clickhouse/views/experiments.py @@ -9,9 +9,15 @@ from rest_framework.response import Response from statshog.defaults.django import statsd -from ee.clickhouse.queries.experiments.funnel_experiment_result import ClickhouseFunnelExperimentResult -from ee.clickhouse.queries.experiments.secondary_experiment_result import ClickhouseSecondaryExperimentResult -from ee.clickhouse.queries.experiments.trend_experiment_result import ClickhouseTrendExperimentResult +from ee.clickhouse.queries.experiments.funnel_experiment_result import ( + ClickhouseFunnelExperimentResult, +) +from ee.clickhouse.queries.experiments.secondary_experiment_result import ( + ClickhouseSecondaryExperimentResult, +) +from ee.clickhouse.queries.experiments.trend_experiment_result import ( + ClickhouseTrendExperimentResult, +) from 
ee.clickhouse.queries.experiments.utils import requires_flag_warning from posthog.api.feature_flag import FeatureFlagSerializer, MinimalFeatureFlagSerializer from posthog.api.routing import StructuredViewSetMixin @@ -50,11 +56,20 @@ def _calculate_experiment_results(experiment: Experiment, refresh: bool = False) ).get_results() else: calculate_func = lambda: ClickhouseFunnelExperimentResult( - filter, experiment.team, experiment.feature_flag, experiment.start_date, experiment.end_date + filter, + experiment.team, + experiment.feature_flag, + experiment.start_date, + experiment.end_date, ).get_results() return _experiment_results_cached( - experiment, "primary", filter, calculate_func, refresh=refresh, exposure_filter=exposure_filter + experiment, + "primary", + filter, + calculate_func, + refresh=refresh, + exposure_filter=exposure_filter, ) @@ -63,7 +78,11 @@ def _calculate_secondary_experiment_results(experiment: Experiment, parsed_id: i # TODO: refactor such that ClickhouseSecondaryExperimentResult's get_results doesn't return a dict calculate_func = lambda: ClickhouseSecondaryExperimentResult( - filter, experiment.team, experiment.feature_flag, experiment.start_date, experiment.end_date + filter, + experiment.team, + experiment.feature_flag, + experiment.start_date, + experiment.end_date, ).get_results()["result"] return _experiment_results_cached(experiment, "secondary", filter, calculate_func, refresh=refresh) @@ -97,12 +116,14 @@ def _experiment_results_cached( if cached_result_package and cached_result_package.get("result") and not refresh: cached_result_package["is_cached"] = True statsd.incr( - "posthog_cached_function_cache_hit", tags={"route": "/projects/:id/experiments/:experiment_id/results"} + "posthog_cached_function_cache_hit", + tags={"route": "/projects/:id/experiments/:experiment_id/results"}, ) return cached_result_package statsd.incr( - "posthog_cached_function_cache_miss", tags={"route": "/projects/:id/experiments/:experiment_id/results"} + 
"posthog_cached_function_cache_miss", + tags={"route": "/projects/:id/experiments/:experiment_id/results"}, ) result = calculate_func() @@ -111,14 +132,17 @@ def _experiment_results_cached( fresh_result_package = {"result": result, "last_refresh": now(), "is_cached": False} update_cached_state( - experiment.team.pk, cache_key, timestamp, fresh_result_package, ttl=EXPERIMENT_RESULTS_CACHE_DEFAULT_TTL + experiment.team.pk, + cache_key, + timestamp, + fresh_result_package, + ttl=EXPERIMENT_RESULTS_CACHE_DEFAULT_TTL, ) return fresh_result_package class ExperimentSerializer(serializers.ModelSerializer): - feature_flag_key = serializers.CharField(source="get_feature_flag_key") created_by = UserBasicSerializer(read_only=True) feature_flag = MinimalFeatureFlagSerializer(read_only=True) @@ -141,7 +165,13 @@ class Meta: "created_at", "updated_at", ] - read_only_fields = ["id", "created_by", "created_at", "updated_at", "feature_flag"] + read_only_fields = [ + "id", + "created_by", + "created_at", + "updated_at", + "feature_flag", + ] def validate_parameters(self, value): if not value: @@ -158,7 +188,6 @@ def validate_parameters(self, value): return value def create(self, validated_data: dict, *args: Any, **kwargs: Any) -> Experiment: - if not validated_data.get("filters"): raise ValidationError("Filters are required to create an Experiment") @@ -233,7 +262,6 @@ def update(self, instance: Experiment, validated_data: dict, *args: Any, **kwarg raise ValidationError(f"Can't update keys: {', '.join(sorted(extra_keys))} on Experiment") if "feature_flag_variants" in validated_data.get("parameters", {}): - if len(validated_data["parameters"]["feature_flag_variants"]) != len(feature_flag.variants): raise ValidationError("Can't update feature_flag_variants on Experiment") @@ -333,7 +361,6 @@ def secondary_results(self, request: Request, *args: Any, **kwargs: Any) -> Resp # ****************************************** @action(methods=["GET"], detail=False) def 
requires_flag_implementation(self, request: Request, *args: Any, **kwargs: Any) -> Response: - filter = Filter(request=request, team=self.team).shallow_clone({"date_from": "-7d", "date_to": ""}) warning = requires_flag_warning(filter, self.team) diff --git a/ee/clickhouse/views/groups.py b/ee/clickhouse/views/groups.py index 4796e58c2eda6..cf80d8f84cbf1 100644 --- a/ee/clickhouse/views/groups.py +++ b/ee/clickhouse/views/groups.py @@ -36,7 +36,11 @@ class ClickhouseGroupsTypesView(StructuredViewSetMixin, mixins.ListModelMixin, v serializer_class = GroupTypeSerializer queryset = GroupTypeMapping.objects.all().order_by("group_type_index") pagination_class = None - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] sharing_enabled_actions = ["list"] @@ -74,7 +78,11 @@ class ClickhouseGroupsView(StructuredViewSetMixin, mixins.ListModelMixin, viewse serializer_class = GroupSerializer queryset = Group.objects.all() pagination_class = GroupCursorPagination - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] def get_queryset(self): return ( @@ -89,9 +97,17 @@ def get_queryset(self): @extend_schema( parameters=[ OpenApiParameter( - "group_type_index", OpenApiTypes.INT, description="Specify the group type to list", required=True + "group_type_index", + OpenApiTypes.INT, + description="Specify the group type to list", + required=True, + ), + OpenApiParameter( + "search", + OpenApiTypes.STR, + description="Search the group name", + required=True, ), - OpenApiParameter("search", OpenApiTypes.STR, description="Search the group name", required=True), ] ) def list(self, request, *args, **kwargs): @@ -123,10 +139,16 @@ def list(self, 
request, *args, **kwargs): @extend_schema( parameters=[ OpenApiParameter( - "group_type_index", OpenApiTypes.INT, description="Specify the group type to find", required=True + "group_type_index", + OpenApiTypes.INT, + description="Specify the group type to find", + required=True, ), OpenApiParameter( - "group_key", OpenApiTypes.STR, description="Specify the key of the group to find", required=True + "group_key", + OpenApiTypes.STR, + description="Specify the key of the group to find", + required=True, ), ] ) @@ -142,10 +164,16 @@ def find(self, request: request.Request, **kw) -> response.Response: @extend_schema( parameters=[ OpenApiParameter( - "group_type_index", OpenApiTypes.INT, description="Specify the group type to find", required=True + "group_type_index", + OpenApiTypes.INT, + description="Specify the group type to find", + required=True, ), OpenApiParameter( - "id", OpenApiTypes.STR, description="Specify the id of the user to find groups for", required=True + "id", + OpenApiTypes.STR, + description="Specify the id of the user to find groups for", + required=True, ), ] ) @@ -186,7 +214,10 @@ def property_definitions(self, request: request.Request, **kw): required=True, ), OpenApiParameter( - "key", OpenApiTypes.STR, description="Specify the property key to find values for", required=True + "key", + OpenApiTypes.STR, + description="Specify the property key to find values for", + required=True, ), ] ) @@ -201,7 +232,11 @@ def property_values(self, request: request.Request, **kw): GROUP BY tupleElement(keysAndValues, 2) ORDER BY value ASC """, - {"team_id": self.team.pk, "group_type_index": request.GET["group_type_index"], "key": request.GET["key"]}, + { + "team_id": self.team.pk, + "group_type_index": request.GET["group_type_index"], + "key": request.GET["key"], + }, ) return response.Response([{"name": name[0]} for name in rows]) diff --git a/ee/clickhouse/views/person.py b/ee/clickhouse/views/person.py index 12149e88b9193..cbcd536126b55 100644 --- 
a/ee/clickhouse/views/person.py +++ b/ee/clickhouse/views/person.py @@ -3,9 +3,15 @@ from rest_framework import request, response from rest_framework.decorators import action -from ee.clickhouse.queries.funnels.funnel_correlation_persons import FunnelCorrelationActors +from ee.clickhouse.queries.funnels.funnel_correlation_persons import ( + FunnelCorrelationActors, +) from posthog.api.person import PersonViewSet -from posthog.constants import FUNNEL_CORRELATION_PERSON_LIMIT, FUNNEL_CORRELATION_PERSON_OFFSET, INSIGHT_FUNNELS +from posthog.constants import ( + FUNNEL_CORRELATION_PERSON_LIMIT, + FUNNEL_CORRELATION_PERSON_OFFSET, + INSIGHT_FUNNELS, +) from posthog.decorators import cached_by_filters from posthog.models import Filter from posthog.utils import format_query_params_absolute_url @@ -45,7 +51,14 @@ def calculate_funnel_correlation_persons( initial_url = format_query_params_absolute_url(request, 0) # cached_function expects a dict with the key result - return {"result": (serialized_actors, next_url, initial_url, raw_count - len(serialized_actors))} + return { + "result": ( + serialized_actors, + next_url, + initial_url, + raw_count - len(serialized_actors), + ) + } class LegacyEnterprisePersonViewSet(EnterprisePersonViewSet): diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel.py index bd6cfb6dd6d42..32e26b0be019d 100644 --- a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel.py @@ -2,11 +2,20 @@ from datetime import datetime from ee.api.test.base import LicensedTestMixin -from ee.clickhouse.views.test.funnel.util import EventPattern, FunnelRequest, get_funnel_actors_ok, get_funnel_ok +from ee.clickhouse.views.test.funnel.util import ( + EventPattern, + FunnelRequest, + get_funnel_actors_ok, + get_funnel_ok, +) from posthog.constants import INSIGHT_FUNNELS from posthog.models.group.util import create_group from 
posthog.models.group_type_mapping import GroupTypeMapping -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -18,11 +27,31 @@ def _create_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, + ) @snapshot_clickhouse_queries def test_funnel_aggregation_with_groups(self): @@ -30,7 +59,11 @@ def test_funnel_aggregation_with_groups(self): events_by_person = { "user_1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:5"}}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + }, { "event": "user signed up", # same person, different group, so should count as different step 1 in funnel "timestamp": datetime(2020, 1, 10, 14), @@ -76,7 +109,11 @@ def 
test_funnel_group_aggregation_with_groups_entity_filtering(self): events_by_person = { "user_1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:5"}} + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + } ], "user_2": [ { # different person, same group, so should count as step two in funnel @@ -98,7 +135,12 @@ def test_funnel_group_aggregation_with_groups_entity_filtering(self): params = FunnelRequest( events=json.dumps( [ - EventPattern(id="user signed up", type="events", order=0, properties={"$group_0": "org:5"}), + EventPattern( + id="user signed up", + type="events", + order=0, + properties={"$group_0": "org:5"}, + ), EventPattern(id="paid", type="events", order=1), ] ), @@ -124,7 +166,11 @@ def test_funnel_with_groups_entity_filtering(self): events_by_person = { "user_1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:5"}}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + }, { "event": "paid", "timestamp": datetime(2020, 1, 3, 14), @@ -149,7 +195,12 @@ def test_funnel_with_groups_entity_filtering(self): params = FunnelRequest( events=json.dumps( [ - EventPattern(id="user signed up", type="events", order=0, properties={"$group_0": "org:5"}), + EventPattern( + id="user signed up", + type="events", + order=0, + properties={"$group_0": "org:5"}, + ), EventPattern(id="paid", type="events", order=1), ] ), @@ -175,7 +226,11 @@ def test_funnel_with_groups_global_filtering(self): events_by_person = { "user_1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$group_0": "org:5"}}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$group_0": "org:5"}, + }, { "event": "paid", "timestamp": datetime(2020, 1, 3, 14), @@ -209,7 +264,16 @@ def 
test_funnel_with_groups_global_filtering(self): date_from="2020-01-01", date_to="2020-01-14", insight=INSIGHT_FUNNELS, - properties=json.dumps([{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}]), + properties=json.dumps( + [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ] + ), ) result = get_funnel_ok(self.client, self.team.pk, params) diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py index 034e1e0fea86e..fcc51e7c7f7fb 100644 --- a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_correlation.py @@ -102,7 +102,11 @@ def test_event_correlation_endpoint_picks_up_events_for_odds_ratios(self): "result": { "events": [ { - "event": {"event": "watched video", "elements": [], "properties": {}}, + "event": { + "event": "watched video", + "elements": [], + "properties": {}, + }, "failure_count": 1, "success_count": 1, "success_people_url": ANY, @@ -247,7 +251,11 @@ def test_event_correlation_endpoint_does_not_include_funnel_steps(self): team_id=self.team.pk, request=FunnelCorrelationRequest( events=json.dumps( - [EventPattern(id="signup"), EventPattern(id="some waypoint"), EventPattern(id="view insights")] + [ + EventPattern(id="signup"), + EventPattern(id="some waypoint"), + EventPattern(id="view insights"), + ] ), date_to="2020-04-04", ), @@ -331,19 +339,31 @@ def test_events_with_properties_correlation_endpoint_provides_people_drill_down_ "Person 1": [ # Failure / watched {"event": "signup", "timestamp": datetime(2020, 1, 1)}, - {"event": "watched video", "properties": {"$browser": "1"}, "timestamp": datetime(2020, 1, 2)}, + { + "event": "watched video", + "properties": {"$browser": "1"}, + "timestamp": datetime(2020, 1, 2), + }, ], "Person 2": [ # Success / watched {"event": "signup", "timestamp": 
datetime(2020, 1, 1)}, - {"event": "watched video", "properties": {"$browser": "1"}, "timestamp": datetime(2020, 1, 2)}, + { + "event": "watched video", + "properties": {"$browser": "1"}, + "timestamp": datetime(2020, 1, 2), + }, {"event": "view insights", "timestamp": datetime(2020, 1, 3)}, ], "Person 3": [ # Success / watched. We need to have three event instances # for this test otherwise the endpoint doesn't return results {"event": "signup", "timestamp": datetime(2020, 1, 1)}, - {"event": "watched video", "properties": {"$browser": "1"}, "timestamp": datetime(2020, 1, 2)}, + { + "event": "watched video", + "properties": {"$browser": "1"}, + "timestamp": datetime(2020, 1, 2), + }, {"event": "view insights", "timestamp": datetime(2020, 1, 3)}, ], "Person 4": [ @@ -379,16 +399,35 @@ def test_correlation_endpoint_with_properties(self): self.client.force_login(self.user) for i in range(10): - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Positive"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Positive"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") for i in range(10, 20): - _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk, properties={"$browser": "Negative"}) + _create_person( + distinct_ids=[f"user_{i}"], + team_id=self.team.pk, + properties={"$browser": "Negative"}, + ) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", 
+ timestamp="2020-01-02T14:00:00Z", ) if i % 2 == 0: _create_event( @@ -428,7 +467,11 @@ def test_correlation_endpoint_with_properties(self): result, [ { - "event": {"event": "$browser::Positive", "elements": [], "properties": {}}, + "event": { + "event": "$browser::Positive", + "elements": [], + "properties": {}, + }, "success_count": 10, "failure_count": 0, "success_people_url": ANY, @@ -437,7 +480,11 @@ def test_correlation_endpoint_with_properties(self): "correlation_type": "success", }, { - "event": {"event": "$browser::Negative", "elements": [], "properties": {}}, + "event": { + "event": "$browser::Negative", + "elements": [], + "properties": {}, + }, "success_count": 0, "failure_count": 10, "success_people_url": ANY, @@ -459,8 +506,16 @@ def test_properties_correlation_endpoint_provides_people_drill_down_urls(self): with freeze_time("2020-01-01"): self.client.force_login(self.user) - update_or_create_person(distinct_ids=["Person 1"], team_id=self.team.pk, properties={"$browser": "1"}) - update_or_create_person(distinct_ids=["Person 2"], team_id=self.team.pk, properties={"$browser": "1"}) + update_or_create_person( + distinct_ids=["Person 1"], + team_id=self.team.pk, + properties={"$browser": "1"}, + ) + update_or_create_person( + distinct_ids=["Person 2"], + team_id=self.team.pk, + properties={"$browser": "1"}, + ) events = { "Person 1": [ @@ -546,7 +601,10 @@ def test_funnel_correlation_with_event_properties_autocapture(self): for i in range(3): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( team=self.team, @@ -556,12 +614,20 @@ def test_funnel_correlation_with_event_properties_autocapture(self): timestamp="2020-01-03T14:00:00Z", properties={"signup_source": "email", "$event_type": "click"}, ) - 
_create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", + ) # Atleast one person that fails, to ensure we get results _create_person(distinct_ids=[f"user_fail"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_fail", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_fail", + timestamp="2020-01-02T14:00:00Z", ) with freeze_time("2020-01-01"): diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_person.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_person.py index 883079f7200eb..260c1ef6e1767 100644 --- a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_person.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_person.py @@ -19,7 +19,12 @@ class TestFunnelPerson(ClickhouseTestMixin, APIBaseTest): def _create_sample_data(self, num, delete=False): - create_group(team_id=self.team.pk, group_type_index=0, group_key="g0", properties={"slug": "g0", "name": "g0"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="g0", + properties={"slug": "g0", "name": "g0"}, + ) for i in range(num): if delete: @@ -58,7 +63,11 @@ def test_basic_format(self): "interval": "day", "actions": json.dumps([]), "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 14, @@ -89,7 +98,11 @@ def test_funnel_actors_with_groups_search(self): "interval": "day", "actions": json.dumps([]), "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + 
{"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 14, @@ -114,7 +127,11 @@ def test_basic_pagination(self): "interval": "day", "actions": json.dumps([]), "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 14, @@ -149,7 +166,11 @@ def test_breakdown_basic_pagination(self): "interval": "day", "actions": json.dumps([]), "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 14, @@ -190,7 +211,11 @@ def test_basic_pagination_with_deleted(self, delete_person_patch): "interval": "day", "actions": json.dumps([]), "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 14, @@ -232,7 +257,11 @@ def test_breakdowns(self): "filter_test_accounts": "false", "new_entity": json.dumps([]), "events": json.dumps( - [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}] + [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ] ), "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", @@ -299,7 +328,10 @@ def test_breakdowns(self): self.assertEqual(1, len(people)) self.assertEqual(None, j["next"]) - response = self.client.get("/api/person/funnel/", data={**request_data, "funnel_step_breakdown": "Safari"}) + response = 
self.client.get( + "/api/person/funnel/", + data={**request_data, "funnel_step_breakdown": "Safari"}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) j = response.json() @@ -319,16 +351,30 @@ def test_pagination(self): for i in range(10): _create_person(distinct_ids=[f"user_{i}"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id=f"user_{i}", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id=f"user_{i}", + timestamp="2020-01-02T14:00:00Z", ) _create_event( - team=self.team, event="positively_related", distinct_id=f"user_{i}", timestamp="2020-01-03T14:00:00Z" + team=self.team, + event="positively_related", + distinct_id=f"user_{i}", + timestamp="2020-01-03T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id=f"user_{i}", + timestamp="2020-01-04T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id=f"user_{i}", timestamp="2020-01-04T14:00:00Z") request_data = { "events": json.dumps( - [{"id": "user signed up", "type": "events", "order": 0}, {"id": "paid", "type": "events", "order": 1}] + [ + {"id": "user signed up", "type": "events", "order": 0}, + {"id": "paid", "type": "events", "order": 1}, + ] ), "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", @@ -339,7 +385,10 @@ def test_pagination(self): "funnel_correlation_person_entity": json.dumps({"id": "positively_related", "type": "events"}), } - response = self.client.get(f"/api/projects/{self.team.pk}/persons/funnel/correlation", data=request_data) + response = self.client.get( + f"/api/projects/{self.team.pk}/persons/funnel/correlation", + data=request_data, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) j = response.json() diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py index 20d19c5c63444..3459e4bf13ca0 100644 --- 
a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_trends_person.py @@ -3,14 +3,24 @@ from rest_framework import status from posthog.constants import INSIGHT_FUNNELS, FunnelOrderType, FunnelVizType -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) class TestFunnelTrendsPerson(ClickhouseTestMixin, APIBaseTest): def test_basic_format(self): user_a = _create_person(distinct_ids=["user a"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:00", + ) common_request_data = { "insight": INSIGHT_FUNNELS, @@ -20,7 +30,11 @@ def test_basic_format(self): "date_to": "2021-06-13 23:59:59", "funnel_window_days": 7, "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 7, @@ -29,17 +43,29 @@ def test_basic_format(self): # 1 user who dropped off starting 2021-06-07 response_1 = self.client.get( - "/api/person/funnel/", data={**common_request_data, "entrance_period_start": "2021-06-07", "drop_off": True} + "/api/person/funnel/", + data={ + **common_request_data, + "entrance_period_start": "2021-06-07", + "drop_off": True, + }, ) response_1_data = response_1.json() self.assertEqual(response_1.status_code, status.HTTP_200_OK) - self.assertEqual([person["id"] for person in response_1_data["results"][0]["people"]], [str(user_a.uuid)]) + self.assertEqual( + [person["id"] for person in response_1_data["results"][0]["people"]], + 
[str(user_a.uuid)], + ) # No users converted 2021-06-07 response_2 = self.client.get( "/api/person/funnel/", - data={**common_request_data, "entrance_period_start": "2021-06-07 00:00", "drop_off": False}, + data={ + **common_request_data, + "entrance_period_start": "2021-06-07 00:00", + "drop_off": False, + }, ) response_2_data = response_2.json() @@ -48,7 +74,12 @@ def test_basic_format(self): # No users dropped off starting 2021-06-08 response_3 = self.client.get( - "/api/person/funnel/", data={**common_request_data, "entrance_period_start": "2021-06-08", "drop_off": True} + "/api/person/funnel/", + data={ + **common_request_data, + "entrance_period_start": "2021-06-08", + "drop_off": True, + }, ) response_3_data = response_3.json() @@ -59,14 +90,49 @@ def test_strict_order(self): user_a = _create_person(distinct_ids=["user a"], team=self.team) user_b = _create_person(distinct_ids=["user b"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:01") - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:02") - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:03") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:01", + ) + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:02", + ) + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:03", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-07 19:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, 
timestamp="2021-06-07 19:00:01") - _create_event(event="step three", distinct_id="user b", team=self.team, timestamp="2021-06-07 19:00:03") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:01", + ) + _create_event( + event="step three", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:03", + ) common_request_data = { "insight": INSIGHT_FUNNELS, @@ -77,7 +143,11 @@ def test_strict_order(self): "funnel_window_days": 7, "funnel_order_type": FunnelOrderType.STRICT, "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 7, @@ -86,33 +156,73 @@ def test_strict_order(self): # 1 user who dropped off response_1 = self.client.get( - "/api/person/funnel/", data={**common_request_data, "entrance_period_start": "2021-06-07", "drop_off": True} + "/api/person/funnel/", + data={ + **common_request_data, + "entrance_period_start": "2021-06-07", + "drop_off": True, + }, ) response_1_data = response_1.json() self.assertEqual(response_1.status_code, status.HTTP_200_OK) - self.assertEqual([person["id"] for person in response_1_data["results"][0]["people"]], [str(user_a.uuid)]) + self.assertEqual( + [person["id"] for person in response_1_data["results"][0]["people"]], + [str(user_a.uuid)], + ) # 1 user who successfully converted response_1 = self.client.get( "/api/person/funnel/", - data={**common_request_data, "entrance_period_start": "2021-06-07", "drop_off": False}, + data={ + **common_request_data, + "entrance_period_start": "2021-06-07", + "drop_off": False, + }, ) response_1_data = response_1.json() self.assertEqual(response_1.status_code, 
status.HTTP_200_OK) - self.assertEqual([person["id"] for person in response_1_data["results"][0]["people"]], [str(user_b.uuid)]) + self.assertEqual( + [person["id"] for person in response_1_data["results"][0]["people"]], + [str(user_b.uuid)], + ) def test_unordered(self): user_a = _create_person(distinct_ids=["user a"], team=self.team) user_b = _create_person(distinct_ids=["user b"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:00") - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-07 19:00:03") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:00", + ) + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-07 19:00:03", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-07 19:00:00") - _create_event(event="step three", distinct_id="user b", team=self.team, timestamp="2021-06-07 19:00:01") - _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-07 19:00:02") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:00", + ) + _create_event( + event="step three", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:01", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-07 19:00:02", + ) common_request_data = { "insight": INSIGHT_FUNNELS, @@ -123,7 +233,11 @@ def test_unordered(self): "funnel_window_days": 7, "funnel_order_type": FunnelOrderType.UNORDERED, "events": json.dumps( - [{"id": "step one", "order": 0}, {"id": "step two", "order": 1}, {"id": "step three", "order": 2}] + [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ] ), "properties": json.dumps([]), "funnel_window_days": 7, @@ 
-132,19 +246,34 @@ def test_unordered(self): # 1 user who dropped off response_1 = self.client.get( - "/api/person/funnel/", data={**common_request_data, "entrance_period_start": "2021-06-07", "drop_off": True} + "/api/person/funnel/", + data={ + **common_request_data, + "entrance_period_start": "2021-06-07", + "drop_off": True, + }, ) response_1_data = response_1.json() self.assertEqual(response_1.status_code, status.HTTP_200_OK) - self.assertEqual([person["id"] for person in response_1_data["results"][0]["people"]], [str(user_a.uuid)]) + self.assertEqual( + [person["id"] for person in response_1_data["results"][0]["people"]], + [str(user_a.uuid)], + ) # 1 user who successfully converted response_1 = self.client.get( "/api/person/funnel/", - data={**common_request_data, "entrance_period_start": "2021-06-07", "drop_off": False}, + data={ + **common_request_data, + "entrance_period_start": "2021-06-07", + "drop_off": False, + }, ) response_1_data = response_1.json() self.assertEqual(response_1.status_code, status.HTTP_200_OK) - self.assertEqual([person["id"] for person in response_1_data["results"][0]["people"]], [str(user_b.uuid)]) + self.assertEqual( + [person["id"] for person in response_1_data["results"][0]["people"]], + [str(user_b.uuid)], + ) diff --git a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_unordered.py b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_unordered.py index e7bb205b4f27b..db5113b826baa 100644 --- a/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_unordered.py +++ b/ee/clickhouse/views/test/funnel/test_clickhouse_funnel_unordered.py @@ -2,11 +2,20 @@ from datetime import datetime from ee.api.test.base import LicensedTestMixin -from ee.clickhouse.views.test.funnel.util import EventPattern, FunnelRequest, get_funnel_actors_ok, get_funnel_ok +from ee.clickhouse.views.test.funnel.util import ( + EventPattern, + FunnelRequest, + get_funnel_actors_ok, + get_funnel_ok, +) from posthog.constants import INSIGHT_FUNNELS from 
posthog.models.group.util import create_group from posthog.models.group_type_mapping import GroupTypeMapping -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -19,15 +28,39 @@ def test_unordered_funnel_with_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, + ) events_by_person = { "user_1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 3, 14), "properties": {"$group_0": "org:5"}}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 3, 14), + "properties": {"$group_0": "org:5"}, + }, { # same person, different group, so should count as different step 1 in funnel "event": "user signed up", "timestamp": datetime(2020, 1, 10, 14), diff --git a/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py 
b/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py index 793d06d16850f..f4ffb1e60384c 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiment_secondary_results.py @@ -8,20 +8,48 @@ DEFAULT_JOURNEYS_FOR_PAYLOAD: Dict[str, List[Dict[str, Any]]] = { # For a trend pageview metric - "person1": [{"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}], + "person1": [ + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + } + ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + ], + "person3": [ + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + } ], - "person3": [{"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}], # doesn't have feature set "person_out_of_control": [{"event": "$pageview", "timestamp": "2020-01-03"}], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}} + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + } ], # wrong feature set somehow "person_out_of_feature_control": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "ablahebf"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "ablahebf"}, + } ], # for a funnel conversion metric "person1_funnel": [ @@ 
-122,7 +150,10 @@ "name": "funnels whatever", "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview_funnel"}, {"order": 1, "id": "$pageleave_funnel"}], + "events": [ + {"order": 0, "id": "$pageview_funnel"}, + {"order": 1, "id": "$pageleave_funnel"}, + ], "properties": [ { "key": "$geoip_country_name", @@ -275,28 +306,76 @@ def test_secondary_metric_results_for_multiple_variants(self): ], # funnel metric second "person1_2": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test_2"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test_2"}, + }, ], "person1_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test_1"}, + }, ], "person2_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test_1"}, + }, ], "person1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}}, + { + "event": 
"$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -304,18 +383,38 @@ def test_secondary_metric_results_for_multiple_variants(self): {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], 
"person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], "person6_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}} + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + } ], }, self.team, @@ -333,27 +432,52 @@ def test_secondary_metric_results_for_multiple_variants(self): "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 25}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 25}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 25}, - {"key": "test", "name": "Test Variant 3", "rollout_percentage": 25}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 25, + }, + { + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 25, + }, + { + "key": "test_2", + "name": "Test Variant 2", + "rollout_percentage": 25, + }, + { + "key": "test", + "name": "Test Variant 3", + "rollout_percentage": 25, + }, ] }, "secondary_metrics": [ { "name": "secondary metric", - "filters": {"insight": "trends", "events": [{"order": 0, "id": "$pageview_trend"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "$pageview_trend"}], + }, }, { "name": "funnel metric", "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], }, }, ], # target metric insignificant since we're testing secondaries right now - "filters": {"insight": "trends", "events": [{"order": 0, "id": "whatever"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "whatever"}], + }, }, ) @@ -441,28 +565,76 @@ def 
test_secondary_metric_results_for_multiple_variants_with_trend_count_per_act ], # avg count per user metric second "person1_2": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + }, ], "person1_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, ], "person2_1": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test_1"}, + }, ], "person1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": 
"$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -470,18 +642,38 @@ def test_secondary_metric_results_for_multiple_variants_with_trend_count_per_act {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], "person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], "person6_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": 
{"$feature/a-b-test": "test_1"}} + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + } ], }, self.team, @@ -499,27 +691,55 @@ def test_secondary_metric_results_for_multiple_variants_with_trend_count_per_act "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 25}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 25}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 25}, - {"key": "test", "name": "Test Variant 3", "rollout_percentage": 25}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 25, + }, + { + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 25, + }, + { + "key": "test_2", + "name": "Test Variant 2", + "rollout_percentage": 25, + }, + { + "key": "test", + "name": "Test Variant 3", + "rollout_percentage": 25, + }, ] }, "secondary_metrics": [ { "name": "secondary metric", - "filters": {"insight": "trends", "events": [{"order": 0, "id": "$pageview_trend"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "$pageview_trend"}], + }, }, { "name": "funnel metric", "filters": { "insight": "trends", - "events": [{"order": 0, "id": "$pageview", "math": "avg_count_per_actor"}], + "events": [ + { + "order": 0, + "id": "$pageview", + "math": "avg_count_per_actor", + } + ], }, }, ], # target metric insignificant since we're testing secondaries right now - "filters": {"insight": "trends", "events": [{"order": 0, "id": "whatever"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "whatever"}], + }, }, ) @@ -650,13 +870,33 @@ def test_secondary_metric_results_for_multiple_variants_with_trend_count_per_pro }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-03", "properties": 
{"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -664,8 +904,16 @@ def test_secondary_metric_results_for_multiple_variants_with_trend_count_per_pro {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], }, self.team, @@ -683,27 +931,56 @@ def test_secondary_metric_results_for_multiple_variants_with_trend_count_per_pro "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 25}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 25}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 25}, - 
{"key": "test", "name": "Test Variant 3", "rollout_percentage": 25}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 25, + }, + { + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 25, + }, + { + "key": "test_2", + "name": "Test Variant 2", + "rollout_percentage": 25, + }, + { + "key": "test", + "name": "Test Variant 3", + "rollout_percentage": 25, + }, ] }, "secondary_metrics": [ { "name": "secondary metric", - "filters": {"insight": "trends", "events": [{"order": 0, "id": "$pageview_trend"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "$pageview_trend"}], + }, }, { "name": "funnel metric", "filters": { "insight": "trends", - "events": [{"order": 0, "id": "$pageview", "math": "avg", "math_property": "mathable"}], + "events": [ + { + "order": 0, + "id": "$pageview", + "math": "avg", + "math_property": "mathable", + } + ], }, }, ], # target metric insignificant since we're testing secondaries right now - "filters": {"insight": "trends", "events": [{"order": 0, "id": "whatever"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "whatever"}], + }, }, ) @@ -842,7 +1119,10 @@ def test_metrics_without_full_flag_information_are_valid(self): "name": "funnels whatever", "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview_funnel"}, {"order": 1, "id": "$pageleave_funnel"}], + "events": [ + {"order": 0, "id": "$pageview_funnel"}, + {"order": 1, "id": "$pageleave_funnel"}, + ], "properties": [ { "key": "$geoip_country_name", @@ -856,7 +1136,10 @@ def test_metrics_without_full_flag_information_are_valid(self): }, ], # target metric insignificant since we're testing secondaries right now - "filters": {"insight": "trends", "events": [{"order": 0, "id": "whatever"}]}, + "filters": { + "insight": "trends", + "events": [{"order": 0, "id": "whatever"}], + }, }, ) diff --git a/ee/clickhouse/views/test/test_clickhouse_experiments.py 
b/ee/clickhouse/views/test/test_clickhouse_experiments.py index 7b1d70c046ee3..64e52e41838aa 100644 --- a/ee/clickhouse/views/test/test_clickhouse_experiments.py +++ b/ee/clickhouse/views/test/test_clickhouse_experiments.py @@ -13,7 +13,6 @@ class TestExperimentCRUD(APILicensedTest): - # List experiments def test_can_list_experiments(self): response = self.client.get(f"/api/projects/{self.team.id}/experiments/") @@ -87,7 +86,10 @@ def test_creating_updating_basic_experiment(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -109,7 +111,8 @@ def test_creating_updating_basic_experiment(self): # Now update response = self.client.patch( - f"/api/projects/{self.team.id}/experiments/{id}", {"description": "Bazinga", "end_date": end_date} + f"/api/projects/{self.team.id}/experiments/{id}", + {"description": "Bazinga", "end_date": end_date}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -149,7 +152,10 @@ def test_adding_behavioral_cohort_filter_to_experiment_fails(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -167,7 +173,10 @@ def test_adding_behavioral_cohort_filter_to_experiment_fails(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json()["type"], "validation_error") - self.assertEqual(response.json()["detail"], "Experiments do not support global filter properties") + self.assertEqual( + response.json()["detail"], + "Experiments do not support global filter properties", + ) def test_invalid_create(self): # Draft experiment @@ -226,15 +235,26 @@ def test_invalid_update(self): 
# Now update response = self.client.patch( f"/api/projects/{self.team.id}/experiments/{id}", - {"description": "Bazinga", "filters": {}, "feature_flag_key": "new_key"}, # invalid + { + "description": "Bazinga", + "filters": {}, + "feature_flag_key": "new_key", + }, # invalid ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Can't update keys: get_feature_flag_key on Experiment") + self.assertEqual( + response.json()["detail"], + "Can't update keys: get_feature_flag_key on Experiment", + ) def test_cant_reuse_existing_feature_flag(self): ff_key = "a-b-test" FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key=ff_key, created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key=ff_key, + created_by=self.user, ) response = self.client.post( f"/api/projects/{self.team.id}/experiments/", @@ -296,7 +316,8 @@ def test_draft_experiment_doesnt_have_FF_active_even_after_updates(self): # Now update response = self.client.patch( - f"/api/projects/{self.team.id}/experiments/{id}", {"description": "Bazinga", "filters": {}} + f"/api/projects/{self.team.id}/experiments/{id}", + {"description": "Bazinga", "filters": {}}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -306,7 +327,8 @@ def test_draft_experiment_doesnt_have_FF_active_even_after_updates(self): # Now launch experiment response = self.client.patch( - f"/api/projects/{self.team.id}/experiments/{id}", {"start_date": "2021-12-01T10:23"} + f"/api/projects/{self.team.id}/experiments/{id}", + {"start_date": "2021-12-01T10:23"}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -355,13 +377,28 @@ def test_create_multivariate_experiment(self): "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - 
{"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -391,7 +428,10 @@ def test_create_multivariate_experiment(self): }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Can't update feature_flag_variants on Experiment") + self.assertEqual( + response.json()["detail"], + "Can't update feature_flag_variants on Experiment", + ) # Allow changing FF rollout %s created_ff = FeatureFlag.objects.get(key=ff_key) @@ -399,7 +439,11 @@ def test_create_multivariate_experiment(self): **created_ff.filters, "multivariate": { "variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 35}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 35, + }, {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, {"key": "test_2", "name": "Test Variant", "rollout_percentage": 32}, ] @@ -413,9 +457,21 @@ def test_create_multivariate_experiment(self): "description": "Bazinga 222", "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, }, @@ -441,15 
+497,30 @@ def test_create_multivariate_experiment(self): "description": "Bazinga", "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Can't update feature_flag_variants on Experiment") + self.assertEqual( + response.json()["detail"], + "Can't update feature_flag_variants on Experiment", + ) # Now try updating other parameter keys response = self.client.patch( @@ -472,20 +543,38 @@ def test_creating_invalid_multivariate_experiment_no_control(self): "parameters": { "feature_flag_variants": [ # no control - {"key": "test_0", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 33}, + { + "key": "test_0", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 33, + }, ] }, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Feature flag variants must contain a control variant") + self.assertEqual( + response.json()["detail"], + "Feature flag 
variants must contain a control variant", + ) def test_deleting_experiment_soft_deletes_feature_flag(self): ff_key = "a-b-tests" @@ -497,7 +586,10 @@ def test_deleting_experiment_soft_deletes_feature_flag(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, } @@ -538,7 +630,10 @@ def test_soft_deleting_feature_flag_does_not_delete_experiment(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -553,7 +648,10 @@ def test_soft_deleting_feature_flag_does_not_delete_experiment(self): id = response.json()["id"] # Now delete the feature flag - response = self.client.patch(f"/api/projects/{self.team.id}/feature_flags/{created_ff.pk}/", {"deleted": True}) + response = self.client.patch( + f"/api/projects/{self.team.id}/feature_flags/{created_ff.pk}/", + {"deleted": True}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -574,7 +672,10 @@ def test_cant_add_global_properties_to_new_experiment(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [ { "key": "industry", @@ -590,7 +691,10 @@ def test_cant_add_global_properties_to_new_experiment(self): ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Experiments do not support global filter properties") + self.assertEqual( + response.json()["detail"], + "Experiments do not support global filter properties", + ) def 
test_creating_updating_experiment_with_group_aggregation(self): ff_key = "a-b-tests" @@ -604,7 +708,10 @@ def test_creating_updating_experiment_with_group_aggregation(self): "feature_flag_key": ff_key, "parameters": None, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], "aggregation_group_type_index": 1, }, @@ -631,7 +738,10 @@ def test_creating_updating_experiment_with_group_aggregation(self): { "description": "Bazinga", "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], "aggregation_group_type_index": 0, }, @@ -657,7 +767,10 @@ def test_creating_updating_experiment_with_group_aggregation(self): { "description": "Bazinga", "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], # "aggregation_group_type_index": None, # removed key }, @@ -691,7 +804,10 @@ def test_creating_experiment_with_group_aggregation_parameter(self): "aggregation_group_type_index": 0, }, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -717,7 +833,10 @@ def test_creating_experiment_with_group_aggregation_parameter(self): { "description": "Bazinga", "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], "aggregation_group_type_index": 1, }, @@ -749,13 +868,28 @@ def test_used_in_experiment_is_populated_correctly_for_feature_flag_list(self) - 
"feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -770,7 +904,11 @@ def test_used_in_experiment_is_populated_correctly_for_feature_flag_list(self) - # add another random feature flag self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag_0", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -804,13 +942,28 @@ def test_create_experiment_updates_feature_flag_cache(self): "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "filters": { - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], 
}, }, @@ -850,9 +1003,21 @@ def test_create_experiment_updates_feature_flag_cache(self): ], "multivariate": { "variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "aggregation_group_type_index": None, @@ -868,7 +1033,10 @@ def test_create_experiment_updates_feature_flag_cache(self): }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json()["detail"], "Can't update feature_flag_variants on Experiment") + self.assertEqual( + response.json()["detail"], + "Can't update feature_flag_variants on Experiment", + ) # ensure cache doesn't change either cached_flags = get_feature_flags_for_team_in_cache(self.team.pk) @@ -886,9 +1054,21 @@ def test_create_experiment_updates_feature_flag_cache(self): ], "multivariate": { "variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "aggregation_group_type_index": None, @@ -902,9 +1082,21 @@ def test_create_experiment_updates_feature_flag_cache(self): "description": "Bazinga", "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 34}, - {"key": "test_1", "name": "Test Variant", 
"rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 32}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 34, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 32, + }, ] }, }, @@ -929,9 +1121,21 @@ def test_create_experiment_updates_feature_flag_cache(self): ], "multivariate": { "variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant", + "rollout_percentage": 34, + }, ] }, "aggregation_group_type_index": None, @@ -946,16 +1150,40 @@ def test_experiment_flow_with_event_results(self): journeys_for( { "person1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": 
"2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -963,15 +1191,31 @@ def test_experiment_flow_with_event_results(self): {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], "person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], }, self.team, @@ -990,7 +1234,10 @@ def test_experiment_flow_with_event_results(self): "parameters": None, "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -1023,7 +1270,10 @@ def test_experiment_flow_with_event_results(self): # Variant with test: Beta(2, 3) and control: Beta(3, 1) distribution # The variant has 
very low probability of being better. self.assertAlmostEqual(response_data["probability"]["test"], 0.114, places=2) - self.assertEqual(response_data["significance_code"], ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual( + response_data["significance_code"], + ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, + ) self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) @snapshot_clickhouse_queries @@ -1034,32 +1284,50 @@ def test_experiment_flow_with_event_results_with_hogql_aggregation(self): { "event": "$pageview", "timestamp": "2020-01-02", - "properties": {"$feature/a-b-test": "test", "$account_id": "person1"}, + "properties": { + "$feature/a-b-test": "test", + "$account_id": "person1", + }, }, { "event": "$pageleave", "timestamp": "2020-01-04", - "properties": {"$feature/a-b-test": "test", "$account_id": "person1"}, + "properties": { + "$feature/a-b-test": "test", + "$account_id": "person1", + }, }, { "event": "$pageview", "timestamp": "2020-01-03", - "properties": {"$feature/a-b-test": "control", "$account_id": "person2"}, + "properties": { + "$feature/a-b-test": "control", + "$account_id": "person2", + }, }, { "event": "$pageleave", "timestamp": "2020-01-05", - "properties": {"$feature/a-b-test": "control", "$account_id": "person2"}, + "properties": { + "$feature/a-b-test": "control", + "$account_id": "person2", + }, }, { "event": "$pageview", "timestamp": "2020-01-04", - "properties": {"$feature/a-b-test": "control", "$account_id": "person3"}, + "properties": { + "$feature/a-b-test": "control", + "$account_id": "person3", + }, }, { "event": "$pageleave", "timestamp": "2020-01-05", - "properties": {"$feature/a-b-test": "control", "$account_id": "person3"}, + "properties": { + "$feature/a-b-test": "control", + "$account_id": "person3", + }, }, # doesn't have feature set { @@ -1076,20 +1344,34 @@ def test_experiment_flow_with_event_results_with_hogql_aggregation(self): { "event": "$pageview", "timestamp": "2020-01-03", - 
"properties": {"$feature/a-b-test": "test", "$account_id": "person4"}, + "properties": { + "$feature/a-b-test": "test", + "$account_id": "person4", + }, }, { "event": "$pageview", "timestamp": "2020-01-04", - "properties": {"$feature/a-b-test": "test", "$account_id": "person5"}, + "properties": { + "$feature/a-b-test": "test", + "$account_id": "person5", + }, }, # doesn't have any properties {"event": "$pageview", "timestamp": "2020-01-03"}, {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], }, self.team, @@ -1108,7 +1390,10 @@ def test_experiment_flow_with_event_results_with_hogql_aggregation(self): "parameters": None, "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], "funnel_aggregate_by_hogql": "properties.$account_id", }, @@ -1142,23 +1427,50 @@ def test_experiment_flow_with_event_results_with_hogql_aggregation(self): # Variant with test: Beta(2, 3) and control: Beta(3, 1) distribution # The variant has very low probability of being better. 
self.assertAlmostEqual(response_data["probability"]["test"], 0.114, places=2) - self.assertEqual(response_data["significance_code"], ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual( + response_data["significance_code"], + ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, + ) self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) def test_experiment_flow_with_event_results_cached(self): journeys_for( { "person1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -1166,15 +1478,31 @@ def test_experiment_flow_with_event_results_cached(self): {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", 
"timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], "person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], }, self.team, @@ -1192,7 +1520,10 @@ def test_experiment_flow_with_event_results_cached(self): "parameters": None, "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, } @@ -1231,7 +1562,10 @@ def test_experiment_flow_with_event_results_cached(self): # Variant with test: Beta(2, 3) and control: Beta(3, 1) distribution # The variant has very low probability of being better. 
self.assertAlmostEqual(response_data["probability"]["test"], 0.114, places=2) - self.assertEqual(response_data["significance_code"], ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual( + response_data["significance_code"], + ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, + ) self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) response2 = self.client.get(f"/api/projects/{self.team.id}/experiments/{id}/results") @@ -1283,10 +1617,18 @@ def test_experiment_flow_with_event_results_and_events_out_of_time_range_timezon ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], "person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], # converted on the same day as end date, but offset by a few minutes. # experiment ended at 10 AM, UTC+1, so this person should not be included. @@ -1320,7 +1662,10 @@ def test_experiment_flow_with_event_results_and_events_out_of_time_range_timezon "parameters": None, "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -1361,7 +1706,10 @@ def test_experiment_flow_with_event_results_and_events_out_of_time_range_timezon # Variant with test: Beta(2, 3) and control: Beta(3, 1) distribution # The variant has very low probability of being better. 
self.assertAlmostEqual(response_data["probability"]["test"], 0.114, places=2) - self.assertEqual(response_data["significance_code"], ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual( + response_data["significance_code"], + ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, + ) self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) @snapshot_clickhouse_queries @@ -1371,27 +1719,67 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): "person1_2": [ # one event having the property is sufficient, since first touch breakdown is the default {"event": "$pageview", "timestamp": "2020-01-02", "properties": {}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test_2"}}, - ], - "person1_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {}}, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test_2"}, + }, + ], + "person1_1": [ + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {}, + }, ], "person2_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test_1"}, + }, ], "person1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageleave", "timestamp": "2020-01-04", "properties": {}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": 
{"$feature/a-b-test": "test"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-04", + "properties": {}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ {"event": "$pageview", "timestamp": "2020-01-04", "properties": {}}, - {"event": "$pageleave", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageleave", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # doesn't have feature set "person_out_of_control": [ @@ -1399,22 +1787,46 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): {"event": "$pageleave", "timestamp": "2020-01-05"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageleave", "timestamp": "2020-08-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageleave", + "timestamp": "2020-08-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], # non-converters with FF "person4": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "test"}, + } ], "person5": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "test"}} + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "test"}, + } ], 
"person6_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}} + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + } ], # converters with unknown flag variant set "person_unknown_1": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "unknown_1"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "unknown_1"}, + }, { "event": "$pageleave", "timestamp": "2020-01-04", @@ -1422,7 +1834,11 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): }, ], "person_unknown_2": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "unknown_2"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "unknown_2"}, + }, { "event": "$pageleave", "timestamp": "2020-01-04", @@ -1430,7 +1846,11 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): }, ], "person_unknown_3": [ - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "unknown_3"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "unknown_3"}, + }, { "event": "$pageleave", "timestamp": "2020-01-04", @@ -1453,15 +1873,34 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 25}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 25}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 25}, - {"key": "test", "name": "Test Variant 3", "rollout_percentage": 25}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 25, + }, + { + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 25, + }, + { + "key": 
"test_2", + "name": "Test Variant 2", + "rollout_percentage": 25, + }, + { + "key": "test", + "name": "Test Variant 3", + "rollout_percentage": 25, + }, ] }, "filters": { "insight": "funnels", - "events": [{"order": 0, "id": "$pageview"}, {"order": 1, "id": "$pageleave"}], + "events": [ + {"order": 0, "id": "$pageview"}, + {"order": 1, "id": "$pageleave"}, + ], "properties": [], }, }, @@ -1495,7 +1934,10 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): self.assertAlmostEqual(response_data["probability"]["test_1"], 0.158, places=1) self.assertAlmostEqual(response_data["probability"]["test_2"], 0.324, places=1) self.assertAlmostEqual(response_data["probability"]["control"], 0.486, places=1) - self.assertEqual(response_data["significance_code"], ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE) + self.assertEqual( + response_data["significance_code"], + ExperimentSignificanceCode.NOT_ENOUGH_EXPOSURE, + ) self.assertAlmostEqual(response_data["expected_loss"], 1, places=2) @@ -1507,45 +1949,96 @@ def test_experiment_flow_with_event_results(self): { "person1": [ # 5 counts, single person - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", 
+ "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, # exposure measured via $feature_flag_called events { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, ], "person2": [ { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, # 1 exposure, but more absolute counts - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + 
"properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], # doesn't have feature set @@ -1554,15 +2047,25 @@ def test_experiment_flow_with_event_results(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "random"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "random", + }, }, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-08-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], }, @@ -1611,21 +2114,47 @@ def test_experiment_flow_with_event_results_with_custom_exposure(self): { "person1": [ # 5 counts, single person - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": 
{"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, # exposure measured via $feature_flag_called events { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "custom_exposure_event", @@ -1635,7 +2164,10 @@ def test_experiment_flow_with_event_results_with_custom_exposure(self): { "event": "custom_exposure_event", "timestamp": "2020-01-03", - "properties": {"$feature/a-b-test": "control", "bonk": "no-bonk"}, + "properties": { + "$feature/a-b-test": "control", + "bonk": "no-bonk", + }, }, ], "person2": [ @@ -1645,12 +2177,28 @@ def test_experiment_flow_with_event_results_with_custom_exposure(self): "properties": {"$feature/a-b-test": "control", "bonk": "bonk"}, }, # 1 exposure, but more absolute counts - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": 
"$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "custom_exposure_event", "timestamp": "2020-01-03", @@ -1677,11 +2225,18 @@ def test_experiment_flow_with_event_results_with_custom_exposure(self): }, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-08-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, { "event": "custom_exposure_event", @@ -1771,24 +2326,37 @@ def test_experiment_flow_with_event_results_with_hogql_filter(self): "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test", "hogql": "true"}, }, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, # exposure measured via $feature_flag_called events { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, ], "person2": [ { "event": "$feature_flag_called", "timestamp": 
"2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, # 1 exposure, but more absolute counts { @@ -1816,12 +2384,18 @@ def test_experiment_flow_with_event_results_with_hogql_filter(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], # doesn't have feature set @@ -1830,15 +2404,25 @@ def test_experiment_flow_with_event_results_with_hogql_filter(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "random"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "random", + }, }, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-08-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], }, @@ -1862,7 +2446,13 @@ def test_experiment_flow_with_event_results_with_hogql_filter(self): { "order": 0, "id": "$pageview", - "properties": [{"key": "properties.hogql ilike 'true'", "type": "hogql", "value": None}], + "properties": [ + { + "key": "properties.hogql ilike 'true'", + "type": "hogql", + "value": None, + } + ], } ], 
}, @@ -1894,45 +2484,96 @@ def test_experiment_flow_with_event_results_out_of_timerange_timezone(self): { "person1": [ # 5 counts, single person - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, # exposure measured via $feature_flag_called events { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, ], "person2": [ { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, # 1 exposure, but more absolute 
counts - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], # doesn't have feature set @@ -1941,15 +2582,25 @@ def test_experiment_flow_with_event_results_out_of_timerange_timezone(self): { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "random"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "random", + }, }, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": 
"$feature_flag_called", "timestamp": "2020-08-03", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], # slightly out of time range @@ -1977,12 +2628,18 @@ def test_experiment_flow_with_event_results_out_of_timerange_timezone(self): { "event": "$feature_flag_called", "timestamp": "2020-01-01 06:00:00", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-01 08:00:00", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test", + }, }, ], "person_t2": [ @@ -1994,12 +2651,18 @@ def test_experiment_flow_with_event_results_out_of_timerange_timezone(self): { "event": "$feature_flag_called", "timestamp": "2020-01-06 15:02:00", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-06 16:00:00", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], }, @@ -2051,30 +2714,58 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): journeys_for( { "person1_2": [ - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}} + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + } ], "person1_1": [ - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}} + { + "event": "$pageview1", + "timestamp": "2020-01-02", + 
"properties": {"$feature/a-b-test": "test_1"}, + } ], "person2_1": [ - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}} + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + } ], # "person1": [ # {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"},}, # ], "person2": [ - {"event": "$pageview1", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}} + { + "event": "$pageview1", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + } ], "person3": [ - {"event": "$pageview1", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}} + { + "event": "$pageview1", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + } ], "person4": [ - {"event": "$pageview1", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}} + { + "event": "$pageview1", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + } ], # doesn't have feature set "person_out_of_control": [{"event": "$pageview1", "timestamp": "2020-01-03"}], "person_out_of_end_date": [ - {"event": "$pageview1", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}} + { + "event": "$pageview1", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + } ], }, self.team, @@ -2092,10 +2783,26 @@ def test_experiment_flow_with_event_results_for_three_test_variants(self): "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 25}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 25}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 25}, - {"key": "test", "name": "Test Variant 3", "rollout_percentage": 25}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 25, + }, + 
{ + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 25, + }, + { + "key": "test_2", + "name": "Test Variant 2", + "rollout_percentage": 25, + }, + { + "key": "test", + "name": "Test Variant 3", + "rollout_percentage": 25, + }, ] }, "filters": { @@ -2133,66 +2840,127 @@ def test_experiment_flow_with_event_results_for_two_test_variants_with_varying_e { "person1_2": [ # for count data - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}}, - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_2"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + }, + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_2"}, + }, # for exposure counting (counted as 1 only) { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test_2"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test_2", + }, }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test_2"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test_2", + }, }, ], "person1_1": [ - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test_1"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test_1", + }, }, ], "person2_1": [ - {"event": "$pageview1", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test_1"}}, - {"event": "$pageview1", "timestamp": "2020-01-02", 
"properties": {"$feature/a-b-test": "test_1"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, + { + "event": "$pageview1", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test_1"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "test_1"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "test_1", + }, }, ], "person2": [ - {"event": "$pageview1", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview1", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview1", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, # 0 exposure shouldn't ideally happen, but it's possible ], "person3": [ - {"event": "$pageview1", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], "person4": [ - {"event": "$pageview1", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview1", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-01-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], # doesn't have feature set 
"person_out_of_control": [{"event": "$pageview1", "timestamp": "2020-01-03"}], "person_out_of_end_date": [ - {"event": "$pageview1", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview1", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, { "event": "$feature_flag_called", "timestamp": "2020-08-02", - "properties": {"$feature_flag": "a-b-test", "$feature_flag_response": "control"}, + "properties": { + "$feature_flag": "a-b-test", + "$feature_flag_response": "control", + }, }, ], }, @@ -2211,9 +2979,21 @@ def test_experiment_flow_with_event_results_for_two_test_variants_with_varying_e "feature_flag_key": ff_key, "parameters": { "feature_flag_variants": [ - {"key": "control", "name": "Control Group", "rollout_percentage": 33}, - {"key": "test_1", "name": "Test Variant 1", "rollout_percentage": 33}, - {"key": "test_2", "name": "Test Variant 2", "rollout_percentage": 34}, + { + "key": "control", + "name": "Control Group", + "rollout_percentage": 33, + }, + { + "key": "test_1", + "name": "Test Variant 1", + "rollout_percentage": 33, + }, + { + "key": "test_2", + "name": "Test Variant 2", + "rollout_percentage": 34, + }, ] }, "filters": { @@ -2252,30 +3032,78 @@ def test_experiment_flow_with_avg_count_per_user_event_results(self): { "person1": [ # 5 counts, single person - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-02", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + 
"timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-02", + "properties": {"$feature/a-b-test": "test"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": "2020-01-03", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-03", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person3": [ - {"event": "$pageview", "timestamp": "2020-01-04", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-01-04", + "properties": {"$feature/a-b-test": "control"}, + }, ], "person4": [ - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "test"}}, - {"event": "$pageview", "timestamp": "2020-01-05", "properties": {"$feature/a-b-test": "test"}}, + { + "event": "$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "test"}, + }, + { + "event": "$pageview", + "timestamp": "2020-01-05", + "properties": {"$feature/a-b-test": "test"}, + }, ], # doesn't have feature set "person_out_of_control": [ {"event": "$pageview", "timestamp": "2020-01-03"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + 
"timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, ], }, self.team, @@ -2294,7 +3122,14 @@ def test_experiment_flow_with_avg_count_per_user_event_results(self): "parameters": None, "filters": { "insight": "TRENDS", - "events": [{"order": 0, "id": "$pageview", "math": "avg_count_per_actor", "name": "$pageview"}], + "events": [ + { + "order": 0, + "id": "$pageview", + "math": "avg_count_per_actor", + "name": "$pageview", + } + ], "properties": [], }, }, @@ -2391,7 +3226,11 @@ def test_experiment_flow_with_avg_count_per_property_value_results(self): {"event": "$pageview", "timestamp": "2020-01-03"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, ], }, self.team, @@ -2410,7 +3249,14 @@ def test_experiment_flow_with_avg_count_per_property_value_results(self): "parameters": None, "filters": { "insight": "TRENDS", - "events": [{"order": 0, "id": "$pageview", "math": "max", "math_property": "mathable"}], + "events": [ + { + "order": 0, + "id": "$pageview", + "math": "max", + "math_property": "mathable", + } + ], "properties": [], }, }, @@ -2507,7 +3353,11 @@ def test_experiment_flow_with_sum_count_per_property_value_results(self): {"event": "$pageview", "timestamp": "2020-01-03"}, ], "person_out_of_end_date": [ - {"event": "$pageview", "timestamp": "2020-08-03", "properties": {"$feature/a-b-test": "control"}}, + { + "event": "$pageview", + "timestamp": "2020-08-03", + "properties": {"$feature/a-b-test": "control"}, + }, ], }, self.team, @@ -2535,7 +3385,14 @@ def test_experiment_flow_with_sum_count_per_property_value_results(self): }, "filters": { "insight": "TRENDS", - "events": [{"order": 0, "id": "$pageview", "math": "sum", "math_property": "mathable"}], + "events": [ + { + "order": 0, + "id": "$pageview", + "math": "sum", + "math_property": 
"mathable", + } + ], "properties": [], }, }, diff --git a/ee/clickhouse/views/test/test_clickhouse_groups.py b/ee/clickhouse/views/test/test_clickhouse_groups.py index e8ee4c8e15a87..1b3687c0fec61 100644 --- a/ee/clickhouse/views/test/test_clickhouse_groups.py +++ b/ee/clickhouse/views/test/test_clickhouse_groups.py @@ -8,7 +8,12 @@ from posthog.models.organization import Organization from posthog.models.sharing_configuration import SharingConfiguration from posthog.models.team.team import Team -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + snapshot_clickhouse_queries, +) class ClickhouseTestGroupsApi(ClickhouseTestMixin, APIBaseTest): @@ -25,9 +30,17 @@ def test_groups_list(self): ) with freeze_time("2021-05-02"): create_group( - team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={"name": "Plankton"}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={"name": "Plankton"}, + ) response_data = self.client.get(f"/api/projects/{self.team.id}/groups?group_type_index=0").json() self.assertEqual( @@ -45,7 +58,10 @@ def test_groups_list(self): { "created_at": "2021-05-01T00:00:00Z", "group_key": "org:5", - "group_properties": {"industry": "finance", "name": "Mr. Krabs"}, + "group_properties": { + "industry": "finance", + "name": "Mr. Krabs", + }, "group_type_index": 0, }, ], @@ -61,7 +77,10 @@ def test_groups_list(self): { "created_at": "2021-05-01T00:00:00Z", "group_key": "org:5", - "group_properties": {"industry": "finance", "name": "Mr. Krabs"}, + "group_properties": { + "industry": "finance", + "name": "Mr. 
Krabs", + }, "group_type_index": 0, }, ], @@ -78,7 +97,10 @@ def test_groups_list(self): { "created_at": "2021-05-01T00:00:00Z", "group_key": "org:5", - "group_properties": {"industry": "finance", "name": "Mr. Krabs"}, + "group_properties": { + "industry": "finance", + "name": "Mr. Krabs", + }, "group_type_index": 0, }, ], @@ -90,7 +112,12 @@ def test_groups_list_no_group_type(self): response_data = self.client.get(f"/api/projects/{self.team.id}/groups/").json() self.assertEqual( response_data, - {"type": "validation_error", "attr": "group_type_index", "code": "invalid_input", "detail": mock.ANY}, + { + "type": "validation_error", + "attr": "group_type_index", + "code": "invalid_input", + "detail": mock.ANY, + }, ) @freeze_time("2021-05-02") @@ -101,7 +128,12 @@ def test_retrieve_group(self): group_key="key", properties={"industry": "finance", "name": "Mr. Krabs"}, ) - create_group(team_id=self.team.pk, group_type_index=1, group_key="foo//bar", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="foo//bar", + properties={}, + ) fail_response = self.client.get(f"/api/projects/{self.team.id}/groups/find?group_type_index=1&group_key=key") self.assertEqual(fail_response.status_code, 404) @@ -236,9 +268,24 @@ def test_property_definitions(self): group_key="org:5", properties={"industry": "finance", "name": "Mr. 
Krabs"}, ) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:1", properties={"name": "Plankton"}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="company:2", properties={}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:1", + properties={"name": "Plankton"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:2", + properties={}, + ) response_data = self.client.get(f"/api/projects/{self.team.id}/groups/property_definitions").json() self.assertEqual( @@ -250,9 +297,24 @@ def test_property_definitions(self): ) def test_property_values(self): - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="org:1", properties={"industry": "finance"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="org:1", + properties={"industry": "finance"}, + ) response_data = self.client.get( f"/api/projects/{self.team.id}/groups/property_values/?key=industry&group_type_index=0" ).json() @@ -260,9 +322,24 @@ def test_property_values(self): self.assertEqual(response_data, [{"name": "finance"}, {"name": "technology"}]) def test_empty_property_values(self): - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", 
properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=1, group_key="org:1", properties={"industry": "finance"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="org:1", + properties={"industry": "finance"}, + ) response_data = self.client.get( f"/api/projects/{self.team.id}/groups/property_values/?key=name&group_type_index=0" ).json() @@ -278,7 +355,11 @@ def test_update_groups_metadata(self): f"/api/projects/{self.team.id}/groups_types/update_metadata", [ {"group_type_index": 0, "name_singular": "organization!"}, - {"group_type_index": 1, "group_type": "rename attempt", "name_plural": "playlists"}, + { + "group_type_index": 1, + "group_type": "rename attempt", + "name_plural": "playlists", + }, ], ).json() @@ -291,8 +372,18 @@ def test_update_groups_metadata(self): "name_singular": "organization!", "name_plural": None, }, - {"group_type_index": 1, "group_type": "playlist", "name_singular": None, "name_plural": "playlists"}, - {"group_type_index": 2, "group_type": "another", "name_singular": None, "name_plural": None}, + { + "group_type_index": 1, + "group_type": "playlist", + "name_singular": None, + "name_plural": "playlists", + }, + { + "group_type_index": 2, + "group_type": "another", + "name_singular": None, + "name_plural": None, + }, ], ) @@ -312,8 +403,18 @@ def test_list_group_types(self): "name_singular": None, "name_plural": None, }, - {"group_type_index": 1, "group_type": "playlist", "name_singular": None, "name_plural": None}, - {"group_type_index": 2, "group_type": "another", "name_singular": None, "name_plural": None}, + { + 
"group_type_index": 1, + "group_type": "playlist", + "name_singular": None, + "name_plural": None, + }, + { + "group_type_index": 2, + "group_type": "another", + "name_singular": None, + "name_plural": None, + }, ], ) @@ -328,7 +429,10 @@ def test_cannot_list_group_types_of_another_org(self): response = self.client.get(f"/api/projects/{other_team.id}/groups_types") # No access to this project self.assertEqual(response.status_code, 403, response.json()) - self.assertEqual(response.json(), self.permission_denied_response("You don't have access to the project.")) + self.assertEqual( + response.json(), + self.permission_denied_response("You don't have access to the project."), + ) def test_can_list_group_types_of_another_org_with_sharing_access_token(self): other_org = Organization.objects.create(name="other org") @@ -352,8 +456,18 @@ def test_can_list_group_types_of_another_org_with_sharing_access_token(self): "name_singular": None, "name_plural": None, }, - {"group_type_index": 1, "group_type": "playlist", "name_singular": None, "name_plural": None}, - {"group_type_index": 2, "group_type": "another", "name_singular": None, "name_plural": None}, + { + "group_type_index": 1, + "group_type": "playlist", + "name_singular": None, + "name_plural": None, + }, + { + "group_type_index": 2, + "group_type": "another", + "name_singular": None, + "name_plural": None, + }, ], ) diff --git a/ee/clickhouse/views/test/test_clickhouse_path_person.py b/ee/clickhouse/views/test/test_clickhouse_path_person.py index faf12efda9edb..48fc8a2475c06 100644 --- a/ee/clickhouse/views/test/test_clickhouse_path_person.py +++ b/ee/clickhouse/views/test/test_clickhouse_path_person.py @@ -10,7 +10,12 @@ from posthog.models.instance_setting import get_instance_setting from posthog.models.person import Person from posthog.tasks.calculate_cohort import insert_cohort_from_insight_filter -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.test.base 
import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) class TestPathPerson(ClickhouseTestMixin, APIBaseTest): @@ -86,7 +91,12 @@ def test_create_paths_cohort(self, _insert_cohort_from_insight_filter): _insert_cohort_from_insight_filter.assert_called_once_with( cohort_id, - {"insight": "PATHS", "filter_test_accounts": "false", "date_from": "2021-05-01", "date_to": "2021-05-10"}, + { + "insight": "PATHS", + "filter_test_accounts": "false", + "date_from": "2021-05-01", + "date_to": "2021-05-10", + }, ) insert_cohort_from_insight_filter(cohort_id, params) @@ -256,7 +266,8 @@ def test_basic_format_with_funnel_path_get(self): } get_response = self.client.get( - "/api/person/path/", data={**request_data, "funnel_filter": json.dumps(funnel_filter)} + "/api/person/path/", + data={**request_data, "funnel_filter": json.dumps(funnel_filter)}, ) self.assertEqual(get_response.status_code, status.HTTP_200_OK) get_j = get_response.json() diff --git a/ee/clickhouse/views/test/test_clickhouse_paths.py b/ee/clickhouse/views/test/test_clickhouse_paths.py index ff82efb250be4..2a9e7f47cf437 100644 --- a/ee/clickhouse/views/test/test_clickhouse_paths.py +++ b/ee/clickhouse/views/test/test_clickhouse_paths.py @@ -3,7 +3,12 @@ from rest_framework import status from posthog.constants import FUNNEL_PATH_AFTER_STEP, INSIGHT_FUNNELS, INSIGHT_PATHS -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) class TestClickhousePaths(ClickhouseTestMixin, APIBaseTest): @@ -37,8 +42,18 @@ def _create_sample_data(self, num, delete=False): def test_insight_paths_basic(self): _create_person(team=self.team, distinct_ids=["person_1"]) - _create_event(properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team) - _create_event(properties={"$current_url": "/about"}, distinct_id="person_1", 
event="$pageview", team=self.team) + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) response = self.client.get(f"/api/projects/{self.team.id}/insights/path").json() self.assertEqual(len(response["result"]), 1) @@ -50,90 +65,183 @@ def test_insight_paths_basic_exclusions(self): _create_event(distinct_id="person_1", event="third event", team=self.team) response = self.client.get( - f"/api/projects/{self.team.id}/insights/path", data={"exclude_events": '["second event"]'} + f"/api/projects/{self.team.id}/insights/path", + data={"exclude_events": '["second event"]'}, ).json() self.assertEqual(len(response["result"]), 1) def test_backwards_compatible_path_types(self): - _create_person(team=self.team, distinct_ids=["person_1"]) - _create_event(properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team) - _create_event(properties={"$current_url": "/about"}, distinct_id="person_1", event="$pageview", team=self.team) _create_event( - properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/something else"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$screen_name": "/screen1"}, + distinct_id="person_1", + event="$screen", + team=self.team, ) - _create_event(properties={"$screen_name": "/screen1"}, distinct_id="person_1", event="$screen", team=self.team) _create_event(distinct_id="person_1", event="custom1", team=self.team) _create_event(distinct_id="person_1", 
event="custom2", team=self.team) response = self.client.get( - f"/api/projects/{self.team.id}/insights/path", data={"path_type": "$pageview", "insight": "PATHS"} + f"/api/projects/{self.team.id}/insights/path", + data={"path_type": "$pageview", "insight": "PATHS"}, ).json() self.assertEqual(len(response["result"]), 2) response = self.client.get( - f"/api/projects/{self.team.id}/insights/path", data={"path_type": "custom_event", "insight": "PATHS"} + f"/api/projects/{self.team.id}/insights/path", + data={"path_type": "custom_event", "insight": "PATHS"}, ).json() self.assertEqual(len(response["result"]), 1) response = self.client.get( - f"/api/projects/{self.team.id}/insights/path", data={"path_type": "$screen", "insight": "PATHS"} + f"/api/projects/{self.team.id}/insights/path", + data={"path_type": "$screen", "insight": "PATHS"}, ).json() self.assertEqual(len(response["result"]), 0) def test_backwards_compatible_start_point(self): - _create_person(team=self.team, distinct_ids=["person_1"]) - _create_event(properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team) - _create_event(properties={"$current_url": "/about"}, distinct_id="person_1", event="$pageview", team=self.team) _create_event( - properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/something else"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$screen_name": "/screen1"}, + distinct_id="person_1", + event="$screen", + team=self.team, + ) + _create_event( + properties={"$screen_name": "/screen2"}, + distinct_id="person_1", + event="$screen", + team=self.team, ) - 
_create_event(properties={"$screen_name": "/screen1"}, distinct_id="person_1", event="$screen", team=self.team) - _create_event(properties={"$screen_name": "/screen2"}, distinct_id="person_1", event="$screen", team=self.team) _create_event(distinct_id="person_1", event="custom1", team=self.team) _create_event(distinct_id="person_1", event="custom2", team=self.team) response = self.client.get( f"/api/projects/{self.team.id}/insights/path", - data={"path_type": "$pageview", "insight": "PATHS", "start_point": "/about"}, + data={ + "path_type": "$pageview", + "insight": "PATHS", + "start_point": "/about", + }, ).json() self.assertEqual(len(response["result"]), 1) response = self.client.get( f"/api/projects/{self.team.id}/insights/path", - data={"path_type": "custom_event", "insight": "PATHS", "start_point": "custom2"}, + data={ + "path_type": "custom_event", + "insight": "PATHS", + "start_point": "custom2", + }, ).json() self.assertEqual(len(response["result"]), 0) response = self.client.get( f"/api/projects/{self.team.id}/insights/path", - data={"path_type": "$screen", "insight": "PATHS", "start_point": "/screen1"}, + data={ + "path_type": "$screen", + "insight": "PATHS", + "start_point": "/screen1", + }, ).json() self.assertEqual(len(response["result"]), 1) def test_path_groupings(self): _create_person(team=self.team, distinct_ids=["person_1"]) _create_event( - properties={"$current_url": "/about_1"}, distinct_id="person_1", event="$pageview", team=self.team + properties={"$current_url": "/about_1"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/about_2"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/something else"}, + distinct_id="person_1", + event="$pageview", + team=self.team, ) _create_event( - properties={"$current_url": "/about_2"}, distinct_id="person_1", event="$pageview", team=self.team + 
properties={"$current_url": "/about3"}, + distinct_id="person_1", + event="$pageview", + team=self.team, ) _create_event( - properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team + properties={"$current_url": "/about4"}, + distinct_id="person_1", + event="$pageview", + team=self.team, ) - _create_event(properties={"$current_url": "/about3"}, distinct_id="person_1", event="$pageview", team=self.team) - _create_event(properties={"$current_url": "/about4"}, distinct_id="person_1", event="$pageview", team=self.team) _create_person(team=self.team, distinct_ids=["person_2"]) _create_event( - properties={"$current_url": "/about_1"}, distinct_id="person_2", event="$pageview", team=self.team + properties={"$current_url": "/about_1"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/about_2"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ) + _create_event( + properties={"$current_url": "/something else"}, + distinct_id="person_2", + event="$pageview", + team=self.team, ) _create_event( - properties={"$current_url": "/about_2"}, distinct_id="person_2", event="$pageview", team=self.team + properties={"$current_url": "/about3"}, + distinct_id="person_2", + event="$pageview", + team=self.team, ) _create_event( - properties={"$current_url": "/something else"}, distinct_id="person_2", event="$pageview", team=self.team + properties={"$current_url": "/about4"}, + distinct_id="person_2", + event="$pageview", + team=self.team, ) - _create_event(properties={"$current_url": "/about3"}, distinct_id="person_2", event="$pageview", team=self.team) - _create_event(properties={"$current_url": "/about4"}, distinct_id="person_2", event="$pageview", team=self.team) response = self.client.get( f"/api/projects/{self.team.id}/insights/path", @@ -173,11 +281,19 @@ def test_funnel_path_post(self): } post_response = self.client.post( - 
f"/api/projects/{self.team.id}/insights/path/", data={**request_data, "funnel_filter": funnel_filter} + f"/api/projects/{self.team.id}/insights/path/", + data={**request_data, "funnel_filter": funnel_filter}, ) self.assertEqual(post_response.status_code, status.HTTP_200_OK) post_j = post_response.json() self.assertEqual( post_j["result"], - [{"source": "1_step two", "target": "2_step three", "value": 4, "average_conversion_time": 600000.0}], + [ + { + "source": "1_step two", + "target": "2_step three", + "value": 4, + "average_conversion_time": 600000.0, + } + ], ) diff --git a/ee/clickhouse/views/test/test_clickhouse_retention.py b/ee/clickhouse/views/test/test_clickhouse_retention.py index 19b912e7c448e..f64aa17ca5834 100644 --- a/ee/clickhouse/views/test/test_clickhouse_retention.py +++ b/ee/clickhouse/views/test/test_clickhouse_retention.py @@ -7,7 +7,10 @@ from posthog.api.test.test_organization import create_organization from posthog.api.test.test_team import create_team from posthog.api.test.test_user import create_user -from posthog.models.instance_setting import get_instance_setting, override_instance_config +from posthog.models.instance_setting import ( + get_instance_setting, + override_instance_config, +) from posthog.models.person import Person as PersonModel from posthog.test.base import ( APIBaseTest, @@ -22,7 +25,6 @@ class RetentionTests(APIBaseTest, ClickhouseTestMixin): @snapshot_clickhouse_queries def test_retention_test_account_filters(self): - organization = create_organization(name="test") team = create_team(organization=organization) user = create_user(email="test@posthog.com", password="1234", organization=organization) @@ -30,19 +32,37 @@ def test_retention_test_account_filters(self): self.client.force_login(user) team.test_account_filters = [ - {"key": "email", "type": "person", "value": "posthog.com", "operator": "not_icontains"} + { + "key": "email", + "type": "person", + "value": "posthog.com", + "operator": "not_icontains", + } ] 
team.save() - update_or_create_person(distinct_ids=["person 1"], team_id=team.pk, properties={"email": "posthog.com"}) + update_or_create_person( + distinct_ids=["person 1"], + team_id=team.pk, + properties={"email": "posthog.com"}, + ) update_or_create_person(distinct_ids=["person 2"], team_id=team.pk) update_or_create_person(distinct_ids=["person 3"], team_id=team.pk) setup_user_activity_by_day( daily_activity={ - "2020-01-01": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, - "2020-01-03": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, + "2020-01-01": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, + "2020-01-02": { + "person 1": [{"event": "target event"}], + "person 3": [{"event": "target event"}], + }, + "2020-01-03": { + "person 1": [{"event": "target event"}], + "person 3": [{"event": "target event"}], + }, }, team=team, ) @@ -64,7 +84,10 @@ def test_retention_test_account_filters(self): retention_by_cohort_by_period = get_by_cohort_by_period_for_response(client=self.client, response=retention) - assert retention_by_cohort_by_period == {"Day 0": {"1": ["person 2"], "2": []}, "Day 1": {"1": ["person 3"]}} + assert retention_by_cohort_by_period == { + "Day 0": {"1": ["person 2"], "2": []}, + "Day 1": {"1": ["person 3"]}, + } @snapshot_clickhouse_queries def test_retention_aggregation_by_distinct_id_and_retrieve_people(self): @@ -79,8 +102,14 @@ def test_retention_aggregation_by_distinct_id_and_retrieve_people(self): setup_user_activity_by_day( daily_activity={ - "2020-01-01": {"person 1": [{"event": "target event"}], "another one": [{"event": "target event"}]}, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, + "2020-01-01": { + "person 1": [{"event": "target event"}], + "another one": 
[{"event": "target event"}], + }, + "2020-01-02": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, "2020-01-03": {"another one": [{"event": "target event"}]}, }, team=team, @@ -182,7 +211,14 @@ def test_people_stable_pagination(self): # but we can assert that all 3 count distinct IDs should be in this list. self.assertTrue( distinct_id in distinct_ids - for distinct_id in ["person 4", "person 3", "person 1", "person 2", "person 0", "person 5"] + for distinct_id in [ + "person 4", + "person 3", + "person 1", + "person 2", + "person 0", + "person 5", + ] ) people_url = response_json["next"] @@ -283,9 +319,18 @@ def test_can_get_retention_cohort_breakdown(self): setup_user_activity_by_day( daily_activity={ - "2020-01-01": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, - "2020-01-03": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, + "2020-01-01": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, + "2020-01-02": { + "person 1": [{"event": "target event"}], + "person 3": [{"event": "target event"}], + }, + "2020-01-03": { + "person 1": [{"event": "target event"}], + "person 3": [{"event": "target event"}], + }, }, team=team, ) @@ -324,9 +369,18 @@ def test_can_get_retention_cohort_breakdown_with_retention_type_target(self): setup_user_activity_by_day( daily_activity={ - "2020-01-01": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, - "2020-01-03": {"person 1": [{"event": "target event"}], "person 3": [{"event": "target event"}]}, + "2020-01-01": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, + "2020-01-02": { + "person 1": [{"event": 
"target event"}], + "person 3": [{"event": "target event"}], + }, + "2020-01-03": { + "person 1": [{"event": "target event"}], + "person 3": [{"event": "target event"}], + }, }, team=team, ) @@ -373,14 +427,20 @@ def test_can_specify_breakdown_person_property(self): setup_user_activity_by_day( daily_activity={ "2020-01-01": {"person 1": [{"event": "target event"}]}, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, + "2020-01-02": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, # IMPORTANT: we include data past the end of the requested # window, as we want to ensure that we pick up all retention # periods for a user. e.g. for "person 2" we do not want to miss # the count from 2020-01-03 e.g. the second period, otherwise we # will skew results for users that didn't perform their target # event right at the beginning of the requested range. - "2020-01-03": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, + "2020-01-03": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, }, team=team, ) @@ -443,7 +503,10 @@ def test_can_specify_breakdown_event_property(self): # the count from 2020-01-03 e.g. the second period, otherwise we # will skew results for users that didn't perform their target # event right at the beginning of the requested range. 
- "2020-01-03": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, + "2020-01-03": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, }, team=team, ) @@ -502,7 +565,10 @@ def test_can_specify_breakdown_event_property_and_retrieve_people(self): "person 1": [{"event": "target event", "properties": {"os": "Chrome"}}], "person 2": [{"event": "target event", "properties": {"os": "Safari"}}], }, - "2020-01-02": {"person 1": [{"event": "target event"}], "person 2": [{"event": "target event"}]}, + "2020-01-02": { + "person 1": [{"event": "target event"}], + "person 2": [{"event": "target event"}], + }, }, team=team, ) @@ -570,7 +636,10 @@ def test_can_get_retention_week_interval(self): retention_by_cohort_by_period = get_by_cohort_by_period_for_response(client=self.client, response=retention) - assert retention_by_cohort_by_period == {"Week 0": {"1": ["person 1"], "2": []}, "Week 1": {"1": ["person 2"]}} + assert retention_by_cohort_by_period == { + "Week 0": {"1": ["person 1"], "2": []}, + "Week 1": {"1": ["person 2"]}, + } class RegressionTests(APIBaseTest, ClickhouseTestMixin): @@ -602,7 +671,14 @@ def test_can_get_actors_and_use_percent_char_filter(self): date_to="2020-01-08", period="Week", retention_type="retention_first_time", - properties=[{"key": "email", "value": "posthog.com", "operator": "not_icontains", "type": "person"}], + properties=[ + { + "key": "email", + "value": "posthog.com", + "operator": "not_icontains", + "type": "person", + } + ], ), ) diff --git a/ee/clickhouse/views/test/test_clickhouse_stickiness.py b/ee/clickhouse/views/test/test_clickhouse_stickiness.py index 8c13580363103..c9d42a6f75a6a 100644 --- a/ee/clickhouse/views/test/test_clickhouse_stickiness.py +++ b/ee/clickhouse/views/test/test_clickhouse_stickiness.py @@ -4,13 +4,21 @@ from freezegun.api import freeze_time from ee.clickhouse.queries.stickiness import ClickhouseStickiness -from 
posthog.api.test.test_stickiness import get_stickiness_time_series_ok, stickiness_test_factory +from posthog.api.test.test_stickiness import ( + get_stickiness_time_series_ok, + stickiness_test_factory, +) from posthog.models.action import Action from posthog.models.action_step import ActionStep from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.group.util import create_group from posthog.queries.util import get_earliest_timestamp -from posthog.test.base import ClickhouseTestMixin, _create_event, _create_person, snapshot_clickhouse_queries +from posthog.test.base import ( + ClickhouseTestMixin, + _create_event, + _create_person, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -29,26 +37,56 @@ def get_people_from_url_ok(client: Client, url: str): return response.json()["results"][0]["people"] -class TestClickhouseStickiness(ClickhouseTestMixin, stickiness_test_factory(ClickhouseStickiness, _create_event, _create_person, _create_action, get_earliest_timestamp)): # type: ignore +class TestClickhouseStickiness( + ClickhouseTestMixin, + stickiness_test_factory( + ClickhouseStickiness, + _create_event, + _create_person, + _create_action, + get_earliest_timestamp, + ), +): # type: ignore @snapshot_clickhouse_queries def test_filter_by_group_properties(self): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:1", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:1", + properties={"industry": "technology"}, ) create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:2", properties={"industry": "agriculture"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:2", + properties={"industry": "agriculture"}, ) create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:3", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:3", + 
properties={"industry": "technology"}, ) create_group(team_id=self.team.pk, group_type_index=0, group_key=f"org:4", properties={}) create_group( - team_id=self.team.pk, group_type_index=1, group_key=f"company:1", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=1, + group_key=f"company:1", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key=f"instance:1", + properties={}, ) - create_group(team_id=self.team.pk, group_type_index=1, group_key=f"instance:1", properties={}) p1, p2, p3, p4 = self._create_multiple_people( - period=timedelta(weeks=1), event_properties=lambda i: {"$group_0": f"org:{i}", "$group_1": "instance:1"} + period=timedelta(weeks=1), + event_properties=lambda i: { + "$group_0": f"org:{i}", + "$group_1": "instance:1", + }, ) with freeze_time("2020-02-15T13:01:01Z"): @@ -60,7 +98,14 @@ def test_filter_by_group_properties(self): "date_from": "2020-01-01", "date_to": "2020-02-15", "events": [{"id": "watched movie"}], - "properties": [{"key": "industry", "value": "technology", "type": "group", "group_type_index": 0}], + "properties": [ + { + "key": "industry", + "value": "technology", + "type": "group", + "group_type_index": 0, + } + ], "interval": "week", }, ) @@ -81,16 +126,26 @@ def test_filter_by_group_properties(self): @snapshot_clickhouse_queries def test_aggregate_by_groups(self): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:0", properties={"industry": "technology"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:0", + properties={"industry": "technology"}, ) create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:1", properties={"industry": "agriculture"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:1", + properties={"industry": "agriculture"}, ) create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:2", properties={"industry": "technology"} + 
team_id=self.team.pk, + group_type_index=0, + group_key=f"org:2", + properties={"industry": "technology"}, ) self._create_multiple_people( - period=timedelta(weeks=1), event_properties=lambda i: {"$group_0": f"org:{i // 2}"} + period=timedelta(weeks=1), + event_properties=lambda i: {"$group_0": f"org:{i // 2}"}, ) with freeze_time("2020-02-15T13:01:01Z"): @@ -101,7 +156,13 @@ def test_aggregate_by_groups(self): "shown_as": "Stickiness", "date_from": "2020-01-01", "date_to": "2020-02-15", - "events": [{"id": "watched movie", "math": "unique_group", "math_group_type_index": 0}], + "events": [ + { + "id": "watched movie", + "math": "unique_group", + "math_group_type_index": 0, + } + ], "interval": "week", }, ) diff --git a/ee/clickhouse/views/test/test_clickhouse_trends.py b/ee/clickhouse/views/test/test_clickhouse_trends.py index c818c64ce1fbc..75ab015e39a15 100644 --- a/ee/clickhouse/views/test/test_clickhouse_trends.py +++ b/ee/clickhouse/views/test/test_clickhouse_trends.py @@ -12,7 +12,11 @@ from ee.api.test.base import LicensedTestMixin from posthog.api.test.test_cohort import create_cohort_ok -from posthog.api.test.test_event_definition import create_organization, create_team, create_user +from posthog.api.test.test_event_definition import ( + create_organization, + create_team, + create_user, +) from posthog.models.group.util import create_group from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.instance_setting import set_instance_setting @@ -56,7 +60,11 @@ def test_includes_only_intervals_within_range(client: Client): with freeze_time("2021-09-20T16:00:00"): #  First identify as a member of the cohort distinct_id = "abc" - update_or_create_person(distinct_ids=[distinct_id], team_id=team.id, properties={"cohort_identifier": 1}) + update_or_create_person( + distinct_ids=[distinct_id], + team_id=team.id, + properties={"cohort_identifier": 1}, + ) cohort = create_cohort_ok( client=client, team_id=team.id, @@ -515,7 +523,6 @@ class 
ClickhouseTestTrends(ClickhouseTestMixin, LicensedTestMixin, APIBaseTest): @snapshot_clickhouse_queries def test_insight_trends_basic(self): - events_by_person = { "1": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}], "2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}], @@ -523,7 +530,6 @@ def test_insight_trends_basic(self): created_people = journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -556,14 +562,19 @@ def test_insight_trends_basic(self): def test_insight_trends_entity_overlap(self): events_by_person = { - "1": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}}], + "1": [ + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "val"}, + } + ], "2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}], "3": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}], } created_people = journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -607,20 +618,28 @@ def test_insight_trends_entity_overlap(self): people = get_people_from_url_ok(self.client, data["$pageview - 0"]["2012-01-14"].person_url) assert sorted([p["id"] for p in people]) == sorted( - [str(created_people["1"].uuid), str(created_people["2"].uuid), str(created_people["3"].uuid)] + [ + str(created_people["1"].uuid), + str(created_people["2"].uuid), + str(created_people["3"].uuid), + ] ) @snapshot_clickhouse_queries def test_insight_trends_clean_arg(self): - events_by_actor = { - "1": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}}], + "1": [ + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "val"}, + } + ], "2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 
14, 3)}], } created_actors = journeys_for(events_by_actor, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -647,7 +666,6 @@ def test_insight_trends_clean_arg(self): @snapshot_clickhouse_queries def test_insight_trends_aggregate(self): - events_by_person = { "1": [{"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3)}], "2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3)}], @@ -685,18 +703,37 @@ def test_insight_trends_aggregate(self): @snapshot_clickhouse_queries def test_insight_trends_cumulative(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"key": "some_val"}) _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"key": "some_val"}) _create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"key": "some_val"}) events_by_person = { "p1": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "val"}}, - {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}}, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "val"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "val"}, + }, + ], + "p2": [ + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "notval"}, + } + ], + "p3": [ + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "val"}, + } ], - "p2": [{"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "notval"}}], - "p3": [{"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}}], } created_people = journeys_for(events_by_person, self.team, create_people=False) @@ -727,7 +764,11 @@ def test_insight_trends_cumulative(self): assert data_response["$pageview"]["2012-01-14"].label == "14-Jan-2012" assert 
sorted([p["id"] for p in person_response]) == sorted( - [str(created_people["p1"].uuid), str(created_people["p2"].uuid), str(created_people["p3"].uuid)] + [ + str(created_people["p1"].uuid), + str(created_people["p2"].uuid), + str(created_people["p3"].uuid), + ] ) # DAU @@ -758,7 +799,11 @@ def test_insight_trends_cumulative(self): assert data_response["$pageview"]["2012-01-14"].label == "14-Jan-2012" assert sorted([p["id"] for p in person_response]) == sorted( - [str(created_people["p1"].uuid), str(created_people["p2"].uuid), str(created_people["p3"].uuid)] + [ + str(created_people["p1"].uuid), + str(created_people["p2"].uuid), + str(created_people["p3"].uuid), + ] ) # breakdown @@ -863,8 +908,20 @@ def test_insight_trends_cumulative(self): @also_test_with_materialized_columns(["key"]) def test_breakdown_with_filter(self): events_by_person = { - "person1": [{"event": "sign up", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "val"}}], - "person2": [{"event": "sign up", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "oh"}}], + "person1": [ + { + "event": "sign up", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "val"}, + } + ], + "person2": [ + { + "event": "sign up", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "oh"}, + } + ], } created_people = journeys_for(events_by_person, self.team) @@ -903,12 +960,28 @@ def test_breakdown_with_filter(self): def test_insight_trends_compare(self): events_by_person = { "p1": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 5, 3), "properties": {"key": "val"}}, - {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "val"}}, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 5, 3), + "properties": {"key": "val"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "val"}, + }, ], "p2": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 5, 3), "properties": {"key": 
"notval"}}, - {"event": "$pageview", "timestamp": datetime(2012, 1, 14, 3), "properties": {"key": "notval"}}, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 5, 3), + "properties": {"key": "notval"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 14, 3), + "properties": {"key": "notval"}, + }, ], } created_people = journeys_for(events_by_person, self.team) @@ -917,7 +990,14 @@ def test_insight_trends_compare(self): request = TrendsRequest( date_from="-7d", compare=True, - events=[{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], + events=[ + { + "id": "$pageview", + "name": "$pageview", + "type": "events", + "order": 0, + } + ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) @@ -929,10 +1009,12 @@ def test_insight_trends_compare(self): with freeze_time("2012-01-15T04:01:34.000Z"): curr_people = get_people_from_url_ok( - self.client, data_response["$pageview - current"]["2012-01-14"].person_url + self.client, + data_response["$pageview - current"]["2012-01-14"].person_url, ) prev_people = get_people_from_url_ok( - self.client, data_response["$pageview - previous"]["2012-01-05"].person_url + self.client, + data_response["$pageview - previous"]["2012-01-05"].person_url, ) assert sorted([p["id"] for p in curr_people]) == sorted( @@ -951,11 +1033,29 @@ def _create_groups(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:7", properties={"industry": "finance"}) create_group( - team_id=self.team.pk, group_type_index=1, group_key="company:10", 
properties={"industry": "finance"} + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:7", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:10", + properties={"industry": "finance"}, ) @snapshot_clickhouse_queries @@ -964,8 +1064,16 @@ def test_aggregating_by_group(self): events_by_person = { "person1": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 2, 12), "properties": {"$group_0": "org:5"}}, - {"event": "$pageview", "timestamp": datetime(2020, 1, 2, 12), "properties": {"$group_0": "org:6"}}, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 2, 12), + "properties": {"$group_0": "org:5"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 2, 12), + "properties": {"$group_0": "org:6"}, + }, { "event": "$pageview", "timestamp": datetime(2020, 1, 2, 12), @@ -979,7 +1087,13 @@ def test_aggregating_by_group(self): date_from="2020-01-01 00:00:00", date_to="2020-01-12 00:00:00", events=[ - {"id": "$pageview", "type": "events", "order": 0, "math": "unique_group", "math_group_type_index": 0} + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "unique_group", + "math_group_type_index": 0, + } ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) @@ -995,12 +1109,28 @@ def test_aggregating_by_group(self): def test_aggregating_by_session(self): events_by_person = { "person1": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$session_id": "1"}}, - {"event": "$pageview", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$session_id": "1"}}, - {"event": "$pageview", "timestamp": datetime(2020, 1, 2, 12), "properties": 
{"$session_id": "2"}}, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$session_id": "1"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$session_id": "1"}, + }, + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 2, 12), + "properties": {"$session_id": "2"}, + }, ], "person2": [ - {"event": "$pageview", "timestamp": datetime(2020, 1, 2, 12), "properties": {"$session_id": "3"}} + { + "event": "$pageview", + "timestamp": datetime(2020, 1, 2, 12), + "properties": {"$session_id": "3"}, + } ], } journeys_for(events_by_person, self.team) @@ -1008,7 +1138,14 @@ def test_aggregating_by_session(self): request = TrendsRequest( date_from="2020-01-01 00:00:00", date_to="2020-01-12 00:00:00", - events=[{"id": "$pageview", "type": "events", "order": 0, "math": "unique_session"}], + events=[ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "unique_session", + } + ], ) data_response = get_trends_time_series_ok(self.client, request, self.team) @@ -1035,7 +1172,6 @@ def test_insight_trends_merging(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -1062,7 +1198,6 @@ def test_insight_trends_merging(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -1102,7 +1237,6 @@ def test_insight_trends_merging_multiple(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -1152,7 +1286,6 @@ def test_insight_trends_merging_multiple(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -1196,15 +1329,28 @@ def 
test_insight_trends_merging_breakdown(self): events_by_person = { "1": [ - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}, - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "2"}}, + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + }, + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "2"}, + }, + ], + "2": [ + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + } ], - "2": [{"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}], } journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequestBreakdown( date_from="-14d", display="ActionsLineGraph", @@ -1233,15 +1379,24 @@ def test_insight_trends_merging_breakdown(self): assert data["$action - 2"]["2012-01-15"].value == 0 events_by_person = { - "1": [{"event": "$action", "timestamp": datetime(2012, 1, 15, 3), "properties": {"key": "2"}}], + "1": [ + { + "event": "$action", + "timestamp": datetime(2012, 1, 15, 3), + "properties": {"key": "2"}, + } + ], "2": [ - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "2"}} + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "2"}, + } ], # this won't be counted } journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequestBreakdown( date_from="-14d", display="ActionsLineGraph", @@ -1276,19 +1431,38 @@ def test_insight_trends_merging_breakdown_multiple(self): events_by_person = { "1": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}, - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}, - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "2"}}, + { + 
"event": "$pageview", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + }, + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + }, + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "2"}, + }, ], "2": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}, - {"event": "$action", "timestamp": datetime(2012, 1, 13, 3), "properties": {"key": "1"}}, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + }, + { + "event": "$action", + "timestamp": datetime(2012, 1, 13, 3), + "properties": {"key": "1"}, + }, ], } journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequestBreakdown( date_from="-14d", display="ActionsLineGraph", @@ -1332,8 +1506,16 @@ def test_insight_trends_merging_breakdown_multiple(self): events_by_person = { "1": [ - {"event": "$pageview", "timestamp": datetime(2012, 1, 15, 3), "properties": {"key": "1"}}, - {"event": "$action", "timestamp": datetime(2012, 1, 15, 3), "properties": {"key": "2"}}, + { + "event": "$pageview", + "timestamp": datetime(2012, 1, 15, 3), + "properties": {"key": "1"}, + }, + { + "event": "$action", + "timestamp": datetime(2012, 1, 15, 3), + "properties": {"key": "2"}, + }, ], "2": [ { @@ -1346,7 +1528,6 @@ def test_insight_trends_merging_breakdown_multiple(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-15T04:01:34.000Z"): - request = TrendsRequestBreakdown( date_from="-14d", display="ActionsLineGraph", @@ -1401,7 +1582,6 @@ def test_insight_trends_merging_skipped_interval(self): journeys_for(events_by_person, self.team) with freeze_time("2012-01-14T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", @@ -1430,7 +1610,6 @@ def test_insight_trends_merging_skipped_interval(self): journeys_for(events_by_person, self.team) 
with freeze_time("2012-01-16T04:01:34.000Z"): - request = TrendsRequest( date_from="-14d", display="ActionsLineGraph", diff --git a/ee/management/commands/materialize_columns.py b/ee/management/commands/materialize_columns.py index bffae385f0616..953110032b31a 100644 --- a/ee/management/commands/materialize_columns.py +++ b/ee/management/commands/materialize_columns.py @@ -2,7 +2,10 @@ from django.core.management.base import BaseCommand -from ee.clickhouse.materialized_columns.analyze import logger, materialize_properties_task +from ee.clickhouse.materialized_columns.analyze import ( + logger, + materialize_properties_task, +) from ee.clickhouse.materialized_columns.columns import DEFAULT_TABLE_COLUMN from posthog.settings import ( MATERIALIZE_COLUMNS_ANALYSIS_PERIOD_HOURS, @@ -20,7 +23,11 @@ def add_arguments(self, parser): parser.add_argument("--property", help="Property to materialize. Skips analysis.") parser.add_argument( - "--property-table", type=str, default="events", choices=["events", "person"], help="Table of --property" + "--property-table", + type=str, + default="events", + choices=["events", "person"], + help="Table of --property", ) parser.add_argument( "--table-column", @@ -63,7 +70,14 @@ def handle(self, *args, **options): logger.info(f"Materializing column. 
table={options['property_table']}, property_name={options['property']}") materialize_properties_task( - columns_to_materialize=[(options["property_table"], options["table_column"], options["property"], 0)], + columns_to_materialize=[ + ( + options["property_table"], + options["table_column"], + options["property"], + 0, + ) + ], backfill_period_days=options["backfill_period"], dry_run=options["dry_run"], ) diff --git a/ee/migrations/0001_initial.py b/ee/migrations/0001_initial.py index 754583dd05a75..fd3cad3892708 100644 --- a/ee/migrations/0001_initial.py +++ b/ee/migrations/0001_initial.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - initial = True dependencies: List = [] @@ -15,7 +14,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="License", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True)), ("plan", models.CharField(max_length=200)), ("valid_until", models.DateTimeField()), diff --git a/ee/migrations/0002_hook.py b/ee/migrations/0002_hook.py index 77e0b39d2cbfd..36516d048a3ca 100644 --- a/ee/migrations/0002_hook.py +++ b/ee/migrations/0002_hook.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0082_personalapikey"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -21,7 +20,10 @@ class Migration(migrations.Migration): fields=[ ("created", models.DateTimeField(auto_now_add=True)), ("updated", models.DateTimeField(auto_now=True)), - ("event", models.CharField(db_index=True, max_length=64, verbose_name="Event")), + ( + "event", + models.CharField(db_index=True, max_length=64, verbose_name="Event"), + ), ("target", models.URLField(max_length=255, verbose_name="Target URL")), ( "id", @@ -36,7 +38,9 @@ class Migration(migrations.Migration): ( 
"team", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="rest_hooks", to="posthog.Team" + on_delete=django.db.models.deletion.CASCADE, + related_name="rest_hooks", + to="posthog.Team", ), ), ( diff --git a/ee/migrations/0003_license_max_users.py b/ee/migrations/0003_license_max_users.py index 038d196450e40..6760baca0c2c4 100644 --- a/ee/migrations/0003_license_max_users.py +++ b/ee/migrations/0003_license_max_users.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("ee", "0002_hook"), ] diff --git a/ee/migrations/0004_enterpriseeventdefinition_enterprisepropertydefinition.py b/ee/migrations/0004_enterpriseeventdefinition_enterprisepropertydefinition.py index ae0dba6e02de1..cd0d2b6b586cc 100644 --- a/ee/migrations/0004_enterpriseeventdefinition_enterprisepropertydefinition.py +++ b/ee/migrations/0004_enterpriseeventdefinition_enterprisepropertydefinition.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0156_insight_short_id"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -33,14 +32,21 @@ class Migration(migrations.Migration): ( "tags", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + null=True, + size=None, ), ), ("updated_at", models.DateTimeField(auto_now=True)), ( "updated_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ], @@ -67,7 +73,11 @@ class Migration(migrations.Migration): ( "tags", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + 
null=True, + size=None, ), ), ("updated_at", models.DateTimeField(auto_now=True)), @@ -83,7 +93,10 @@ class Migration(migrations.Migration): ( "updated_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ], diff --git a/ee/migrations/0005_project_based_permissioning.py b/ee/migrations/0005_project_based_permissioning.py index 42e3ba203abbc..d785637d17d40 100644 --- a/ee/migrations/0005_project_based_permissioning.py +++ b/ee/migrations/0005_project_based_permissioning.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0170_project_based_permissioning"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -22,10 +21,16 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), - ("level", models.PositiveSmallIntegerField(choices=[(1, "member"), (8, "administrator")], default=1)), + ( + "level", + models.PositiveSmallIntegerField(choices=[(1, "member"), (8, "administrator")], default=1), + ), ("joined_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ( @@ -51,7 +56,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="explicitteammembership", constraint=models.UniqueConstraint( - fields=("team", "parent_membership"), name="unique_explicit_team_membership" + fields=("team", "parent_membership"), + name="unique_explicit_team_membership", ), ), ] diff --git a/ee/migrations/0006_event_definition_verification.py b/ee/migrations/0006_event_definition_verification.py index 69654bdf7f0c1..c86f415d3f23f 100644 --- a/ee/migrations/0006_event_definition_verification.py +++ 
b/ee/migrations/0006_event_definition_verification.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("ee", "0005_project_based_permissioning"), diff --git a/ee/migrations/0007_dashboard_permissions.py b/ee/migrations/0007_dashboard_permissions.py index d7d2943283b74..015498bfca9dc 100644 --- a/ee/migrations/0007_dashboard_permissions.py +++ b/ee/migrations/0007_dashboard_permissions.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0203_dashboard_permissions"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -22,7 +21,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( diff --git a/ee/migrations/0008_null_definition_descriptions.py b/ee/migrations/0008_null_definition_descriptions.py index edd4a6d17f5a8..1172813b25aca 100644 --- a/ee/migrations/0008_null_definition_descriptions.py +++ b/ee/migrations/0008_null_definition_descriptions.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("ee", "0007_dashboard_permissions"), ] diff --git a/ee/migrations/0009_deprecated_old_tags.py b/ee/migrations/0009_deprecated_old_tags.py index 4f5b6e91a6ed3..c01f76cfd6cf8 100644 --- a/ee/migrations/0009_deprecated_old_tags.py +++ b/ee/migrations/0009_deprecated_old_tags.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("ee", "0008_null_definition_descriptions"), ] diff --git a/ee/migrations/0010_migrate_definitions_tags.py b/ee/migrations/0010_migrate_definitions_tags.py index ca763d2273b27..687d7460447ad 100644 --- a/ee/migrations/0010_migrate_definitions_tags.py +++ b/ee/migrations/0010_migrate_definitions_tags.py @@ -11,7 +11,10 @@ def reverse(apps, schema_editor): class 
Migration(migrations.Migration): - dependencies = [("ee", "0009_deprecated_old_tags"), ("posthog", "0213_deprecated_old_tags")] + dependencies = [ + ("ee", "0009_deprecated_old_tags"), + ("posthog", "0213_deprecated_old_tags"), + ] operations = [ migrations.RunPython(forwards, reverse), diff --git a/ee/migrations/0011_add_tags_back.py b/ee/migrations/0011_add_tags_back.py index 6e77077a21ec2..0f5d2ff4f21c8 100644 --- a/ee/migrations/0011_add_tags_back.py +++ b/ee/migrations/0011_add_tags_back.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("ee", "0010_migrate_definitions_tags"), ] @@ -15,14 +14,22 @@ class Migration(migrations.Migration): model_name="enterpriseeventdefinition", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=None, + null=True, + size=None, ), ), migrations.AddField( model_name="enterprisepropertydefinition", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=None, + null=True, + size=None, ), ), ] diff --git a/ee/migrations/0012_migrate_tags_v2.py b/ee/migrations/0012_migrate_tags_v2.py index a058d46bd59d1..c7e325225ec41 100644 --- a/ee/migrations/0012_migrate_tags_v2.py +++ b/ee/migrations/0012_migrate_tags_v2.py @@ -34,14 +34,21 @@ def forwards(apps, schema_editor): for event_definition_page in event_definition_paginator.page_range: logger.info( - "event_definition_tag_batch_get_start", limit=batch_size, offset=(event_definition_page - 1) * batch_size + "event_definition_tag_batch_get_start", + limit=batch_size, + offset=(event_definition_page - 1) * batch_size, ) event_definitions = iter(event_definition_paginator.get_page(event_definition_page)) for tags, team_id, 
event_definition_id in event_definitions: unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "") for tag in unique_tags: temp_tag = Tag(name=tag, team_id=team_id) - createables.append((temp_tag, TaggedItem(event_definition_id=event_definition_id, tag_id=temp_tag.id))) + createables.append( + ( + temp_tag, + TaggedItem(event_definition_id=event_definition_id, tag_id=temp_tag.id), + ) + ) logger.info("event_definition_tag_get_end", tags_count=len(createables)) num_event_definition_tags = len(createables) @@ -68,10 +75,19 @@ def forwards(apps, schema_editor): for tag in unique_tags: temp_tag = Tag(name=tag, team_id=team_id) createables.append( - (temp_tag, TaggedItem(property_definition_id=property_definition_id, tag_id=temp_tag.id)) + ( + temp_tag, + TaggedItem( + property_definition_id=property_definition_id, + tag_id=temp_tag.id, + ), + ) ) - logger.info("property_definition_tag_get_end", tags_count=len(createables) - num_event_definition_tags) + logger.info( + "property_definition_tag_get_end", + tags_count=len(createables) - num_event_definition_tags, + ) # Consistent ordering to make independent runs non-deterministic createables = sorted(createables, key=lambda pair: pair[0].name) @@ -102,7 +118,9 @@ def forwards(apps, schema_editor): # Create tag <-> item relationships, ignoring conflicts TaggedItem.objects.bulk_create( - [tagged_item for (_, tagged_item) in createable_batch], ignore_conflicts=True, batch_size=batch_size + [tagged_item for (_, tagged_item) in createable_batch], + ignore_conflicts=True, + batch_size=batch_size, ) logger.info("ee/0012_migrate_tags_v2_end") diff --git a/ee/migrations/0013_silence_deprecated_tags_warnings.py b/ee/migrations/0013_silence_deprecated_tags_warnings.py index b16c2df8ba879..c27f29ef35002 100644 --- a/ee/migrations/0013_silence_deprecated_tags_warnings.py +++ b/ee/migrations/0013_silence_deprecated_tags_warnings.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ 
("ee", "0012_migrate_tags_v2"), ] diff --git a/ee/migrations/0014_roles_memberships_and_resource_access.py b/ee/migrations/0014_roles_memberships_and_resource_access.py index 7267062afe1db..dd5b0a7468841 100644 --- a/ee/migrations/0014_roles_memberships_and_resource_access.py +++ b/ee/migrations/0014_roles_memberships_and_resource_access.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0280_fix_async_deletion_team"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), @@ -22,14 +21,18 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=200)), ( "feature_flags_access_level", models.PositiveSmallIntegerField( - choices=[(21, "Can only view"), (37, "Can always edit")], default=37 + choices=[(21, "Can only view"), (37, "Can always edit")], + default=37, ), ), ("created_at", models.DateTimeField(auto_now_add=True)), @@ -60,7 +63,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("joined_at", models.DateTimeField(auto_now_add=True)), @@ -88,7 +94,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="OrganizationResourceAccess", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "resource", models.CharField( @@ -107,7 +121,8 @@ class Migration(migrations.Migration): ( "access_level", models.PositiveSmallIntegerField( - choices=[(21, "Can only view"), (37, "Can always edit")], 
default=37 + choices=[(21, "Can only view"), (37, "Can always edit")], + default=37, ), ), ("created_at", models.DateTimeField(auto_now_add=True)), @@ -115,7 +130,9 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -131,7 +148,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlagRoleAccess", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("added_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ( @@ -165,7 +190,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="organizationresourceaccess", constraint=models.UniqueConstraint( - fields=("organization", "resource"), name="unique resource per organization" + fields=("organization", "resource"), + name="unique resource per organization", ), ), migrations.AddConstraint( diff --git a/ee/migrations/0015_add_verified_properties.py b/ee/migrations/0015_add_verified_properties.py index 9b7fb3f860901..c61c980ba4ec5 100644 --- a/ee/migrations/0015_add_verified_properties.py +++ b/ee/migrations/0015_add_verified_properties.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("ee", "0014_roles_memberships_and_resource_access"), diff --git a/ee/models/dashboard_privilege.py b/ee/models/dashboard_privilege.py index c9fa62fb10763..40c48ec9ca9d2 100644 --- a/ee/models/dashboard_privilege.py +++ b/ee/models/dashboard_privilege.py @@ -7,7 +7,10 @@ # We call models that grant a user access to some resource (which isn't a grouping of users) a 
"privilege" class DashboardPrivilege(UUIDModel): dashboard: models.ForeignKey = models.ForeignKey( - "posthog.Dashboard", on_delete=models.CASCADE, related_name="privileges", related_query_name="privilege" + "posthog.Dashboard", + on_delete=models.CASCADE, + related_name="privileges", + related_query_name="privilege", ) user: models.ForeignKey = models.ForeignKey( "posthog.User", diff --git a/ee/models/event_definition.py b/ee/models/event_definition.py index ab2c9d67de526..408fbcec43cf5 100644 --- a/ee/models/event_definition.py +++ b/ee/models/event_definition.py @@ -5,18 +5,31 @@ class EnterpriseEventDefinition(EventDefinition): - owner = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL, related_name="event_definitions") + owner = models.ForeignKey( + "posthog.User", + null=True, + on_delete=models.SET_NULL, + related_name="event_definitions", + ) description: models.TextField = models.TextField(blank=True, null=True, default="") updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) updated_by = models.ForeignKey("posthog.User", null=True, on_delete=models.SET_NULL, blank=True) verified: models.BooleanField = models.BooleanField(default=False, blank=True) verified_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) verified_by = models.ForeignKey( - "posthog.User", null=True, on_delete=models.SET_NULL, blank=True, related_name="verifying_user" + "posthog.User", + null=True, + on_delete=models.SET_NULL, + blank=True, + related_name="verifying_user", ) # Deprecated in favour of app-wide tagging model. 
See EnterpriseTaggedItem deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list) deprecated_tags_v2: ArrayField = ArrayField( - models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags" + models.CharField(max_length=32), + null=True, + blank=True, + default=None, + db_column="tags", ) diff --git a/ee/models/explicit_team_membership.py b/ee/models/explicit_team_membership.py index 3878d1dbf32c2..4ea34b318aa08 100644 --- a/ee/models/explicit_team_membership.py +++ b/ee/models/explicit_team_membership.py @@ -36,7 +36,10 @@ class Level(models.IntegerChoices): class Meta: constraints = [ - models.UniqueConstraint(fields=["team", "parent_membership"], name="unique_explicit_team_membership") + models.UniqueConstraint( + fields=["team", "parent_membership"], + name="unique_explicit_team_membership", + ) ] def __str__(self): diff --git a/ee/models/hook.py b/ee/models/hook.py index 0753dde01a77f..735a22e9d85c7 100644 --- a/ee/models/hook.py +++ b/ee/models/hook.py @@ -31,11 +31,15 @@ def clean(self): def hook_saved(sender, instance: Hook, created, **kwargs): if instance.event == "action_performed": get_client().publish( - "reload-action", json.dumps({"teamId": instance.team_id, "actionId": instance.resource_id}) + "reload-action", + json.dumps({"teamId": instance.team_id, "actionId": instance.resource_id}), ) @mutable_receiver(post_delete, sender=Hook) def hook_deleted(sender, instance: Hook, **kwargs): if instance.event == "action_performed": - get_client().publish("drop-action", json.dumps({"teamId": instance.team_id, "actionId": instance.resource_id})) + get_client().publish( + "drop-action", + json.dumps({"teamId": instance.team_id, "actionId": instance.resource_id}), + ) diff --git a/ee/models/license.py b/ee/models/license.py index 8e0779d4db8a3..17547a867212a 100644 --- a/ee/models/license.py +++ b/ee/models/license.py @@ -34,7 +34,10 @@ def first_valid(self) -> Optional["License"]: 
valid_licenses = list(self.filter(Q(valid_until__gte=timezone.now()) | Q(plan="cloud"))) if not valid_licenses: return None - return max(valid_licenses, key=lambda license: License.PLAN_TO_SORTING_VALUE.get(license.plan, 0)) + return max( + valid_licenses, + key=lambda license: License.PLAN_TO_SORTING_VALUE.get(license.plan, 0), + ) class License(models.Model): diff --git a/ee/models/organization_resource_access.py b/ee/models/organization_resource_access.py index db28e3a7526c7..201cb354177f1 100644 --- a/ee/models/organization_resource_access.py +++ b/ee/models/organization_resource_access.py @@ -36,5 +36,8 @@ class Resources(models.TextChoices): class Meta: constraints = [ - models.UniqueConstraint(fields=["organization", "resource"], name="unique resource per organization") + models.UniqueConstraint( + fields=["organization", "resource"], + name="unique resource per organization", + ) ] diff --git a/ee/models/property_definition.py b/ee/models/property_definition.py index b49f239771153..a93b8e957511e 100644 --- a/ee/models/property_definition.py +++ b/ee/models/property_definition.py @@ -12,11 +12,19 @@ class EnterprisePropertyDefinition(PropertyDefinition): verified: models.BooleanField = models.BooleanField(default=False, blank=True) verified_at: models.DateTimeField = models.DateTimeField(null=True, blank=True) verified_by = models.ForeignKey( - "posthog.User", null=True, on_delete=models.SET_NULL, blank=True, related_name="property_verifying_user" + "posthog.User", + null=True, + on_delete=models.SET_NULL, + blank=True, + related_name="property_verifying_user", ) # Deprecated in favour of app-wide tagging model. 
See EnterpriseTaggedItem deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list) deprecated_tags_v2: ArrayField = ArrayField( - models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags" + models.CharField(max_length=32), + null=True, + blank=True, + default=None, + db_column="tags", ) diff --git a/ee/models/role.py b/ee/models/role.py index 5284972bd7cc1..61908ff83149e 100644 --- a/ee/models/role.py +++ b/ee/models/role.py @@ -12,7 +12,7 @@ class Role(UUIDModel): related_name="roles", related_query_name="role", ) - feature_flags_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + feature_flags_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField( default=OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT, choices=OrganizationResourceAccess.AccessLevel.choices, ) diff --git a/ee/models/test/test_event_definition_model.py b/ee/models/test/test_event_definition_model.py index 91bba0caff850..253de5d9c1ede 100644 --- a/ee/models/test/test_event_definition_model.py +++ b/ee/models/test/test_event_definition_model.py @@ -8,7 +8,10 @@ class TestEventDefinition(BaseTest): def test_errors_on_invalid_verified_by_type(self): with pytest.raises(ValueError): EnterpriseEventDefinition.objects.create( - team=self.team, name="enterprise event", owner=self.user, verified_by="Not user id" # type: ignore + team=self.team, + name="enterprise event", + owner=self.user, + verified_by="Not user id", # type: ignore ) def test_default_verified_false(self): diff --git a/ee/models/test/test_property_definition_model.py b/ee/models/test/test_property_definition_model.py index 77c3c58b62b0b..25ede95c04d28 100644 --- a/ee/models/test/test_property_definition_model.py +++ b/ee/models/test/test_property_definition_model.py @@ -8,7 +8,9 @@ class TestPropertyDefinition(BaseTest): def test_errors_on_invalid_verified_by_type(self): with 
pytest.raises(ValueError): EnterprisePropertyDefinition.objects.create( - team=self.team, name="enterprise property", verified_by="Not user id" # type: ignore + team=self.team, + name="enterprise property", + verified_by="Not user id", # type: ignore ) def test_default_verified_false(self): diff --git a/ee/session_recordings/session_recording_extensions.py b/ee/session_recordings/session_recording_extensions.py index 0a7ec6233bea8..9683afc472331 100644 --- a/ee/session_recordings/session_recording_extensions.py +++ b/ee/session_recordings/session_recording_extensions.py @@ -45,7 +45,9 @@ def save_recording_with_new_content(recording: SessionRecording, content: str) - zipped_content = gzip.compress(content.encode("utf-8")) object_storage.write( - new_path, zipped_content, extras={"ContentType": "application/json", "ContentEncoding": "gzip"} + new_path, + zipped_content, + extras={"ContentType": "application/json", "ContentEncoding": "gzip"}, ) recording.storage_version = "2023-08-01" @@ -74,11 +76,17 @@ def persist_recording(recording_id: str, team_id: int) -> None: if recording.deleted: logger.info( - "Persisting recording: skipping as recording is deleted", recording_id=recording_id, team_id=team_id + "Persisting recording: skipping as recording is deleted", + recording_id=recording_id, + team_id=team_id, ) return - logger.info("Persisting recording: loading metadata...", recording_id=recording_id, team_id=team_id) + logger.info( + "Persisting recording: loading metadata...", + recording_id=recording_id, + team_id=team_id, + ) recording.load_metadata() @@ -103,7 +111,12 @@ def persist_recording(recording_id: str, team_id: int) -> None: recording.storage_version = "2023-08-01" recording.object_storage_path = target_prefix recording.save() - logger.info("Persisting recording: done!", recording_id=recording_id, team_id=team_id, source="s3") + logger.info( + "Persisting recording: done!", + recording_id=recording_id, + team_id=team_id, + source="s3", + ) return else: 
logger.error( diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py index 72ee7915cb111..f2cfbbef4dc2f 100644 --- a/ee/session_recordings/session_recording_playlist.py +++ b/ee/session_recordings/session_recording_playlist.py @@ -15,13 +15,30 @@ from posthog.session_recordings.session_recording_api import list_recordings from posthog.api.shared import UserBasicSerializer from posthog.constants import SESSION_RECORDINGS_FILTER_IDS, AvailableFeature -from posthog.models import SessionRecording, SessionRecordingPlaylist, SessionRecordingPlaylistItem, Team, User -from posthog.models.activity_logging.activity_log import Change, Detail, changes_between, log_activity +from posthog.models import ( + SessionRecording, + SessionRecordingPlaylist, + SessionRecordingPlaylistItem, + Team, + User, +) +from posthog.models.activity_logging.activity_log import ( + Change, + Detail, + changes_between, + log_activity, +) from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.team.team import check_is_feature_available_for_team from posthog.models.utils import UUIDT -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission -from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) from posthog.utils import relative_date_parse logger = structlog.get_logger(__name__) @@ -94,7 +111,10 @@ def create(self, validated_data: Dict, *args, **kwargs) -> SessionRecordingPlayl created_by = validated_data.pop("created_by", request.user) playlist = SessionRecordingPlaylist.objects.create( - team=team, created_by=created_by, last_modified_by=request.user, **validated_data + team=team, + 
created_by=created_by, + last_modified_by=request.user, + **validated_data, ) log_playlist_activity( @@ -145,7 +165,11 @@ def _check_can_create_playlist(self, team: Team) -> bool: class SessionRecordingPlaylistViewSet(StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): queryset = SessionRecordingPlaylist.objects.all() serializer_class = SessionRecordingPlaylistSerializer - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] filter_backends = [DjangoFilterBackend] filterset_fields = ["short_id", "created_by"] @@ -213,16 +237,26 @@ def recordings(self, request: request.Request, *args: Any, **kwargs: Any) -> res return response.Response(list_recordings(filter, request, context=self.get_serializer_context())) # As of now, you can only "update" a session recording by adding or removing a recording from a static playlist - @action(methods=["POST", "DELETE"], detail=True, url_path="recordings/(?P[^/.]+)") + @action( + methods=["POST", "DELETE"], + detail=True, + url_path="recordings/(?P[^/.]+)", + ) def modify_recordings( - self, request: request.Request, session_recording_id: str, *args: Any, **kwargs: Any + self, + request: request.Request, + session_recording_id: str, + *args: Any, + **kwargs: Any, ) -> response.Response: playlist = self.get_object() # TODO: Maybe we need to save the created_at date here properly to help with filtering if request.method == "POST": recording, _ = SessionRecording.objects.get_or_create( - session_id=session_recording_id, team=self.team, defaults={"deleted": False} + session_id=session_recording_id, + team=self.team, + defaults={"deleted": False}, ) playlist_item, created = SessionRecordingPlaylistItem.objects.get_or_create( playlist=playlist, recording=recording @@ -231,7 
+265,9 @@ def modify_recordings( return response.Response({"success": True}) if request.method == "DELETE": - playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) # type: ignore + playlist_item = SessionRecordingPlaylistItem.objects.get( + playlist=playlist, recording=session_recording_id + ) # type: ignore if playlist_item: playlist_item.delete() diff --git a/ee/session_recordings/test/test_session_recording_extensions.py b/ee/session_recordings/test/test_session_recording_extensions.py index c71750ed2ab80..35fd5d2bc8b7a 100644 --- a/ee/session_recordings/test/test_session_recording_extensions.py +++ b/ee/session_recordings/test/test_session_recording_extensions.py @@ -15,7 +15,9 @@ ) from posthog.models.signals import mute_selected_signals from posthog.session_recordings.models.session_recording import SessionRecording -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.settings import ( OBJECT_STORAGE_ENDPOINT, OBJECT_STORAGE_ACCESS_KEY_ID, @@ -46,7 +48,8 @@ def teardown_method(self, method) -> None: def test_does_not_persist_too_recent_recording(self): recording = SessionRecording.objects.create( - team=self.team, session_id=f"test_does_not_persist_too_recent_recording-s1-{uuid4()}" + team=self.team, + session_id=f"test_does_not_persist_too_recent_recording-s1-{uuid4()}", ) produce_replay_summary( @@ -67,7 +70,8 @@ def test_can_build_different_object_storage_paths(self) -> None: team_id=self.team.pk, ) recording: SessionRecording = SessionRecording.objects.create( - team=self.team, session_id="test_can_build_different_object_storage_paths-s1" + team=self.team, + session_id="test_can_build_different_object_storage_paths-s1", ) assert ( recording.build_object_storage_path("2022-12-22") diff --git 
a/ee/session_recordings/test/test_session_recording_playlist.py b/ee/session_recordings/test/test_session_recording_playlist.py index 0881f47697e99..57da070f53c95 100644 --- a/ee/session_recordings/test/test_session_recording_playlist.py +++ b/ee/session_recordings/test/test_session_recording_playlist.py @@ -12,9 +12,13 @@ from ee.api.test.base import APILicensedTest from ee.api.test.fixtures.available_product_features import AVAILABLE_PRODUCT_FEATURES from posthog.models import SessionRecording, SessionRecordingPlaylistItem -from posthog.session_recordings.models.session_recording_playlist import SessionRecordingPlaylist +from posthog.session_recordings.models.session_recording_playlist import ( + SessionRecordingPlaylist, +) from posthog.models.user import User -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.settings import ( OBJECT_STORAGE_ENDPOINT, OBJECT_STORAGE_ACCESS_KEY_ID, @@ -53,7 +57,10 @@ def test_list_playlists(self): } def test_creates_playlist(self): - response = self.client.post(f"/api/projects/{self.team.id}/session_recording_playlists", data={"name": "test"}) + response = self.client.post( + f"/api/projects/{self.team.id}/session_recording_playlists", + data={"name": "test"}, + ) assert response.status_code == status.HTTP_201_CREATED assert response.json() == { "id": response.json()["id"], @@ -77,10 +84,14 @@ def test_creates_too_many_playlists(self): limit = int(feature["limit"]) for _ in range(limit): response = self.client.post( - f"/api/projects/{self.team.id}/session_recording_playlists", data={"name": "test"} + f"/api/projects/{self.team.id}/session_recording_playlists", + data={"name": "test"}, ) assert response.status_code == status.HTTP_201_CREATED - response = self.client.post(f"/api/projects/{self.team.id}/session_recording_playlists", data={"name": "test"}) + response = 
self.client.post( + f"/api/projects/{self.team.id}/session_recording_playlists", + data={"name": "test"}, + ) assert response.status_code == status.HTTP_403_FORBIDDEN def test_gets_individual_playlist_by_shortid(self): diff --git a/ee/settings.py b/ee/settings.py index 8b381ae9220b1..5fbb4c81fe8ce 100644 --- a/ee/settings.py +++ b/ee/settings.py @@ -30,7 +30,10 @@ SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = "" SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = "" SOCIAL_AUTH_SAML_ORG_INFO = {"en-US": {"name": "posthog", "displayname": "PostHog", "url": "https://posthog.com"}} -SOCIAL_AUTH_SAML_TECHNICAL_CONTACT = {"givenName": "PostHog Support", "emailAddress": "hey@posthog.com"} +SOCIAL_AUTH_SAML_TECHNICAL_CONTACT = { + "givenName": "PostHog Support", + "emailAddress": "hey@posthog.com", +} SOCIAL_AUTH_SAML_SUPPORT_CONTACT = SOCIAL_AUTH_SAML_TECHNICAL_CONTACT diff --git a/ee/tasks/__init__.py b/ee/tasks/__init__.py index dd549cd0c2789..4bc793399424b 100644 --- a/ee/tasks/__init__.py +++ b/ee/tasks/__init__.py @@ -1,5 +1,12 @@ -from ee.session_recordings.persistence_tasks import persist_finished_recordings, persist_single_recording -from .subscriptions import deliver_subscription_report, handle_subscription_value_change, schedule_all_subscriptions +from ee.session_recordings.persistence_tasks import ( + persist_finished_recordings, + persist_single_recording, +) +from .subscriptions import ( + deliver_subscription_report, + handle_subscription_value_change, + schedule_all_subscriptions, +) # As our EE tasks are not included at startup for Celery, we need to ensure they are declared here so that they are imported by posthog/settings/celery.py diff --git a/ee/tasks/auto_rollback_feature_flag.py b/ee/tasks/auto_rollback_feature_flag.py index 249c556a4fd8a..d1b7e606976a6 100644 --- a/ee/tasks/auto_rollback_feature_flag.py +++ b/ee/tasks/auto_rollback_feature_flag.py @@ -73,7 +73,9 @@ def check_condition(rollback_condition: Dict, feature_flag: FeatureFlag) -> bool elif 
rollback_condition["threshold_type"] == "insight": rolling_average = calculate_rolling_average( - rollback_condition["threshold_metric"], feature_flag.team, feature_flag.team.timezone + rollback_condition["threshold_metric"], + feature_flag.team, + feature_flag.team.timezone, ) if rollback_condition["operator"] == "lt": diff --git a/ee/tasks/materialized_columns.py b/ee/tasks/materialized_columns.py index 93bf06ede8d98..0e33d0d7cf7ab 100644 --- a/ee/tasks/materialized_columns.py +++ b/ee/tasks/materialized_columns.py @@ -1,6 +1,10 @@ from celery.utils.log import get_task_logger -from ee.clickhouse.materialized_columns.columns import TRIM_AND_EXTRACT_PROPERTY, ColumnName, get_materialized_columns +from ee.clickhouse.materialized_columns.columns import ( + TRIM_AND_EXTRACT_PROPERTY, + ColumnName, + get_materialized_columns, +) from posthog.client import sync_execute from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE @@ -12,7 +16,12 @@ def mark_all_materialized() -> None: logger.info("There are running mutations, skipping marking as materialized") return - for table, property_name, table_column, column_name in get_materialized_columns_with_default_expression(): + for ( + table, + property_name, + table_column, + column_name, + ) in get_materialized_columns_with_default_expression(): updated_table = "sharded_events" if table == "events" else table # :TRICKY: On cloud, we ON CLUSTER updates to events/sharded_events but not to persons. Why? 
¯\_(ツ)_/¯ diff --git a/ee/tasks/send_license_usage.py b/ee/tasks/send_license_usage.py index d16924472d45e..1360f124f6b85 100644 --- a/ee/tasks/send_license_usage.py +++ b/ee/tasks/send_license_usage.py @@ -31,7 +31,11 @@ def send_license_usage(): )[0][0] response = requests.post( "https://license.posthog.com/licenses/usage", - data={"date": date_from.strftime("%Y-%m-%d"), "key": license.key, "events_count": events_count}, + data={ + "date": date_from.strftime("%Y-%m-%d"), + "key": license.key, + "events_count": events_count, + }, ) if response.status_code == 404 and response.json().get("code") == "not_found": @@ -56,7 +60,10 @@ def send_license_usage(): "events_count": events_count, "organization_name": user.current_organization.name, # type: ignore }, - groups={"organization": str(user.current_organization.id), "instance": SITE_URL}, # type: ignore + groups={ + "organization": str(user.current_organization.id), # type: ignore + "instance": SITE_URL, + }, ) response.raise_for_status() return @@ -70,7 +77,10 @@ def send_license_usage(): "license_keys": [license.key for license in License.objects.all()], "organization_name": user.current_organization.name, # type: ignore }, - groups={"organization": str(user.current_organization.id), "instance": SITE_URL}, # type: ignore + groups={ + "organization": str(user.current_organization.id), # type: ignore + "instance": SITE_URL, + }, ) except Exception as err: try: @@ -82,7 +92,10 @@ def send_license_usage(): "date": date_from.strftime("%Y-%m-%d"), "organization_name": user.current_organization.name, # type: ignore }, - groups={"organization": str(user.current_organization.id), "instance": SITE_URL}, # type: ignore + groups={ + "organization": str(user.current_organization.id), # type: ignore + "instance": SITE_URL, + }, ) raise err except: diff --git a/ee/tasks/slack.py b/ee/tasks/slack.py index 8ef7ca47d2107..0137089b08bab 100644 --- a/ee/tasks/slack.py +++ b/ee/tasks/slack.py @@ -75,7 +75,10 @@ def 
_handle_slack_event(event_payload: Any) -> None: "blocks": [ { "type": "section", - "text": {"type": "mrkdwn", "text": insights[0].name or insights[0].derived_name}, + "text": { + "type": "mrkdwn", + "text": insights[0].name or insights[0].derived_name, + }, "accessory": _block_for_asset(assets[0]), } ] @@ -87,7 +90,11 @@ def _handle_slack_event(event_payload: Any) -> None: except Exception as e: # NOTE: This is temporary as a test to understand if the channel and ts are actually required as the docs are not clear slack_integration.client.chat_unfurl( - unfurls=unfurls, unfurl_id=unfurl_id, source=source, channel=channel, ts=message_ts + unfurls=unfurls, + unfurl_id=unfurl_id, + source=source, + channel=channel, + ts=message_ts, ) raise e diff --git a/ee/tasks/subscriptions/__init__.py b/ee/tasks/subscriptions/__init__.py index 17a1455fb2dfb..a5adc7a640763 100644 --- a/ee/tasks/subscriptions/__init__.py +++ b/ee/tasks/subscriptions/__init__.py @@ -14,18 +14,27 @@ logger = structlog.get_logger(__name__) SUBSCRIPTION_QUEUED = Counter( - "subscription_queued", "A subscription was queued for delivery", labelnames=["destination"] + "subscription_queued", + "A subscription was queued for delivery", + labelnames=["destination"], ) SUBSCRIPTION_SUCCESS = Counter( - "subscription_send_success", "A subscription was sent successfully", labelnames=["destination"] + "subscription_send_success", + "A subscription was sent successfully", + labelnames=["destination"], +) +SUBSCRIPTION_FAILURE = Counter( + "subscription_send_failure", + "A subscription failed to send", + labelnames=["destination"], ) -SUBSCRIPTION_FAILURE = Counter("subscription_send_failure", "A subscription failed to send", labelnames=["destination"]) def _deliver_subscription_report( - subscription_id: int, previous_value: Optional[str] = None, invite_message: Optional[str] = None + subscription_id: int, + previous_value: Optional[str] = None, + invite_message: Optional[str] = None, ) -> None: - subscription = ( 
Subscription.objects.prefetch_related("dashboard__insights") .select_related("created_by", "insight", "dashboard") @@ -44,7 +53,10 @@ def _deliver_subscription_report( insights, assets = generate_assets(subscription) if not assets: - capture_message("No assets are in this subscription", tags={"subscription_id": subscription.id}) + capture_message( + "No assets are in this subscription", + tags={"subscription_id": subscription.id}, + ) return if subscription.target_type == "email": @@ -83,7 +95,10 @@ def _deliver_subscription_report( try: send_slack_subscription_report( - subscription, assets, total_asset_count=len(insights), is_new_subscription=is_new_subscription_target + subscription, + assets, + total_asset_count=len(insights), + is_new_subscription=is_new_subscription_target, ) SUBSCRIPTION_SUCCESS.labels(destination="slack").inc() except Exception as e: diff --git a/ee/tasks/subscriptions/slack_subscriptions.py b/ee/tasks/subscriptions/slack_subscriptions.py index e7531258dd4fc..1d35259a6f3c4 100644 --- a/ee/tasks/subscriptions/slack_subscriptions.py +++ b/ee/tasks/subscriptions/slack_subscriptions.py @@ -25,7 +25,10 @@ def _block_for_asset(asset: ExportedAsset) -> Dict: def send_slack_subscription_report( - subscription: Subscription, assets: List[ExportedAsset], total_asset_count: int, is_new_subscription: bool = False + subscription: Subscription, + assets: List[ExportedAsset], + total_asset_count: int, + is_new_subscription: bool = False, ) -> None: utm_tags = f"{UTM_TAGS_BASE}&utm_medium=slack" @@ -54,10 +57,20 @@ def send_slack_subscription_report( blocks = [] - blocks.extend([{"type": "section", "text": {"type": "mrkdwn", "text": title}}, _block_for_asset(first_asset)]) + blocks.extend( + [ + {"type": "section", "text": {"type": "mrkdwn", "text": title}}, + _block_for_asset(first_asset), + ] + ) if other_assets: - blocks.append({"type": "section", "text": {"type": "mrkdwn", "text": "_See 🧵 for more Insights_"}}) + blocks.append( + { + "type": "section", 
+ "text": {"type": "mrkdwn", "text": "_See 🧵 for more Insights_"}, + } + ) blocks.extend( [ diff --git a/ee/tasks/subscriptions/subscription_utils.py b/ee/tasks/subscriptions/subscription_utils.py index 265e0385a5bcf..5df00e4a8ee85 100644 --- a/ee/tasks/subscriptions/subscription_utils.py +++ b/ee/tasks/subscriptions/subscription_utils.py @@ -26,7 +26,8 @@ def generate_assets( - resource: Union[Subscription, SharingConfiguration], max_asset_count: int = DEFAULT_MAX_ASSET_COUNT + resource: Union[Subscription, SharingConfiguration], + max_asset_count: int = DEFAULT_MAX_ASSET_COUNT, ) -> Tuple[List[Insight], List[ExportedAsset]]: with SUBSCRIPTION_ASSET_GENERATION_TIMER.time(): if resource.dashboard: @@ -39,7 +40,12 @@ def generate_assets( # Create all the assets we need assets = [ - ExportedAsset(team=resource.team, export_format="image/png", insight=insight, dashboard=resource.dashboard) + ExportedAsset( + team=resource.team, + export_format="image/png", + insight=insight, + dashboard=resource.dashboard, + ) for insight in insights[:max_asset_count] ] ExportedAsset.objects.bulk_create(assets) @@ -53,7 +59,8 @@ def generate_assets( parallel_job = chain(*tasks).apply_async() wait_for_parallel_celery_group( - parallel_job, max_timeout=timedelta(minutes=settings.ASSET_GENERATION_MAX_TIMEOUT_MINUTES) + parallel_job, + max_timeout=timedelta(minutes=settings.ASSET_GENERATION_MAX_TIMEOUT_MINUTES), ) return insights, assets diff --git a/ee/tasks/test/subscriptions/test_email_subscriptions.py b/ee/tasks/test/subscriptions/test_email_subscriptions.py index cbe98e2d896e8..b70a3ae3f76e6 100644 --- a/ee/tasks/test/subscriptions/test_email_subscriptions.py +++ b/ee/tasks/test/subscriptions/test_email_subscriptions.py @@ -45,7 +45,10 @@ def test_new_subscription_delivery(self, MockEmailMessage: MagicMock) -> None: mocked_email_messages = mock_email_messages(MockEmailMessage) send_email_subscription_report( - "test1@posthog.com", self.subscription, [self.asset], invite_message="My 
invite message" + "test1@posthog.com", + self.subscription, + [self.asset], + invite_message="My invite message", ) assert len(mocked_email_messages) == 1 @@ -60,7 +63,10 @@ def test_should_have_different_text_for_self(self, MockEmailMessage: MagicMock) mocked_email_messages = mock_email_messages(MockEmailMessage) send_email_subscription_report( - self.user.email, self.subscription, [self.asset], invite_message="My invite message" + self.user.email, + self.subscription, + [self.asset], + invite_message="My invite message", ) assert len(mocked_email_messages) == 1 @@ -74,7 +80,11 @@ def test_sends_dashboard_subscription(self, MockEmailMessage: MagicMock) -> None subscription = create_subscription(team=self.team, dashboard=self.dashboard, created_by=self.user) send_email_subscription_report( - self.user.email, subscription, [self.asset], invite_message="My invite message", total_asset_count=10 + self.user.email, + subscription, + [self.asset], + invite_message="My invite message", + total_asset_count=10, ) assert len(mocked_email_messages) == 1 diff --git a/ee/tasks/test/subscriptions/test_slack_subscriptions.py b/ee/tasks/test/subscriptions/test_slack_subscriptions.py index 4703d3e3dc0ef..9770127e73778 100644 --- a/ee/tasks/test/subscriptions/test_slack_subscriptions.py +++ b/ee/tasks/test/subscriptions/test_slack_subscriptions.py @@ -130,7 +130,10 @@ def test_subscription_dashboard_delivery(self, MockSlackIntegration: MagicMock) "image_url": post_message_calls[0].kwargs["blocks"][1]["image_url"], "alt_text": "My Test subscription", }, - {"type": "section", "text": {"type": "mrkdwn", "text": "_See 🧵 for more Insights_"}}, + { + "type": "section", + "text": {"type": "mrkdwn", "text": "_See 🧵 for more Insights_"}, + }, {"type": "divider"}, { "type": "actions", @@ -154,13 +157,21 @@ def test_subscription_dashboard_delivery(self, MockSlackIntegration: MagicMock) assert second_call["channel"] == "C12345" assert second_call["thread_ts"] == "1.234" assert 
second_call["blocks"] == [ - {"type": "image", "image_url": second_call["blocks"][0]["image_url"], "alt_text": "My Test subscription"} + { + "type": "image", + "image_url": second_call["blocks"][0]["image_url"], + "alt_text": "My Test subscription", + } ] # Third call - other asset third_call = post_message_calls[2].kwargs assert third_call["blocks"] == [ - {"type": "image", "image_url": third_call["blocks"][0]["image_url"], "alt_text": "My Test subscription"} + { + "type": "image", + "image_url": third_call["blocks"][0]["image_url"], + "alt_text": "My Test subscription", + } ] # Fourth call - notice that more exists diff --git a/ee/tasks/test/subscriptions/test_subscriptions.py b/ee/tasks/test/subscriptions/test_subscriptions.py index 8d5d0158678d7..3a63e27ee6ff0 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions.py +++ b/ee/tasks/test/subscriptions/test_subscriptions.py @@ -55,7 +55,12 @@ def test_subscription_delivery_scheduling( create_subscription(team=self.team, insight=self.insight, created_by=self.user), create_subscription(team=self.team, insight=self.insight, created_by=self.user), create_subscription(team=self.team, dashboard=self.dashboard, created_by=self.user), - create_subscription(team=self.team, dashboard=self.dashboard, created_by=self.user, deleted=True), + create_subscription( + team=self.team, + dashboard=self.dashboard, + created_by=self.user, + deleted=True, + ), ] # Modify a subscription to have its target time at least an hour ahead subscriptions[2].start_date = datetime(2022, 1, 1, 10, 0).replace(tzinfo=ZoneInfo("UTC")) @@ -64,7 +69,10 @@ def test_subscription_delivery_scheduling( schedule_all_subscriptions() - assert mock_deliver_task.delay.mock_calls == [call(subscriptions[0].id), call(subscriptions[1].id)] + assert mock_deliver_task.delay.mock_calls == [ + call(subscriptions[0].id), + call(subscriptions[1].id), + ] @patch("ee.tasks.subscriptions.deliver_subscription_report") def 
test_does_not_schedule_subscription_if_item_is_deleted( @@ -100,7 +108,10 @@ def test_does_not_schedule_subscription_if_item_is_deleted( assert mock_deliver_task.delay.call_count == 0 def test_deliver_subscription_report_email( - self, mock_gen_assets: MagicMock, mock_send_email: MagicMock, mock_send_slack: MagicMock + self, + mock_gen_assets: MagicMock, + mock_send_email: MagicMock, + mock_send_slack: MagicMock, ) -> None: subscription = create_subscription(team=self.team, insight=self.insight, created_by=self.user) mock_gen_assets.return_value = [self.insight], [self.asset] @@ -110,12 +121,27 @@ def test_deliver_subscription_report_email( assert mock_send_email.call_count == 2 assert mock_send_email.call_args_list == [ - call("test1@posthog.com", subscription, [self.asset], invite_message=None, total_asset_count=1), - call("test2@posthog.com", subscription, [self.asset], invite_message=None, total_asset_count=1), + call( + "test1@posthog.com", + subscription, + [self.asset], + invite_message=None, + total_asset_count=1, + ), + call( + "test2@posthog.com", + subscription, + [self.asset], + invite_message=None, + total_asset_count=1, + ), ] def test_handle_subscription_value_change_email( - self, mock_gen_assets: MagicMock, mock_send_email: MagicMock, mock_send_slack: MagicMock + self, + mock_gen_assets: MagicMock, + mock_send_email: MagicMock, + mock_send_slack: MagicMock, ) -> None: subscription = create_subscription( team=self.team, @@ -126,7 +152,9 @@ def test_handle_subscription_value_change_email( mock_gen_assets.return_value = [self.insight], [self.asset] handle_subscription_value_change( - subscription.id, previous_value="test_existing@posthog.com", invite_message="My invite message" + subscription.id, + previous_value="test_existing@posthog.com", + invite_message="My invite message", ) assert mock_send_email.call_count == 1 @@ -142,7 +170,10 @@ def test_handle_subscription_value_change_email( ] def test_deliver_subscription_report_slack( - self, 
mock_gen_assets: MagicMock, mock_send_email: MagicMock, mock_send_slack: MagicMock + self, + mock_gen_assets: MagicMock, + mock_send_email: MagicMock, + mock_send_slack: MagicMock, ) -> None: subscription = create_subscription( team=self.team, @@ -157,5 +188,10 @@ def test_deliver_subscription_report_slack( assert mock_send_slack.call_count == 1 assert mock_send_slack.call_args_list == [ - call(subscription, [self.asset], total_asset_count=1, is_new_subscription=False) + call( + subscription, + [self.asset], + total_asset_count=1, + is_new_subscription=False, + ) ] diff --git a/ee/tasks/test/subscriptions/test_subscriptions_utils.py b/ee/tasks/test/subscriptions/test_subscriptions_utils.py index decdc8269e1e7..35b2ca350ed8a 100644 --- a/ee/tasks/test/subscriptions/test_subscriptions_utils.py +++ b/ee/tasks/test/subscriptions/test_subscriptions_utils.py @@ -3,7 +3,10 @@ import pytest -from ee.tasks.subscriptions.subscription_utils import DEFAULT_MAX_ASSET_COUNT, generate_assets +from ee.tasks.subscriptions.subscription_utils import ( + DEFAULT_MAX_ASSET_COUNT, + generate_assets, +) from ee.tasks.test.subscriptions.subscriptions_test_factory import create_subscription from posthog.models.dashboard import Dashboard from posthog.models.dashboard_tile import DashboardTile diff --git a/ee/tasks/test/test_auto_rollback_feature_flag.py b/ee/tasks/test/test_auto_rollback_feature_flag.py index f27925dbe3235..c9afe258500ee 100644 --- a/ee/tasks/test/test_auto_rollback_feature_flag.py +++ b/ee/tasks/test/test_auto_rollback_feature_flag.py @@ -187,7 +187,10 @@ def test_check_condition_sentry(self, stats_for_timerange): with freeze_time("2021-08-23T20:00:00.000Z"): self.assertEqual(check_condition(rollback_condition, flag), True) stats_for_timerange.assert_called_once_with( - "2021-08-21T20:00:00", "2021-08-22T20:00:00", "2021-08-22T20:00:00", "2021-08-23T20:00:00" + "2021-08-21T20:00:00", + "2021-08-22T20:00:00", + "2021-08-22T20:00:00", + "2021-08-23T20:00:00", ) 
stats_for_timerange.reset_mock() @@ -195,5 +198,8 @@ def test_check_condition_sentry(self, stats_for_timerange): with freeze_time("2021-08-25T13:00:00.000Z"): self.assertEqual(check_condition(rollback_condition, flag), False) stats_for_timerange.assert_called_once_with( - "2021-08-21T20:00:00", "2021-08-22T20:00:00", "2021-08-24T13:00:00", "2021-08-25T13:00:00" + "2021-08-21T20:00:00", + "2021-08-22T20:00:00", + "2021-08-24T13:00:00", + "2021-08-25T13:00:00", ) diff --git a/ee/tasks/test/test_calculate_cohort.py b/ee/tasks/test/test_calculate_cohort.py index 216ead6537c56..24cb0a3783f4d 100644 --- a/ee/tasks/test/test_calculate_cohort.py +++ b/ee/tasks/test/test_calculate_cohort.py @@ -12,7 +12,9 @@ from posthog.test.base import ClickhouseTestMixin, _create_event, _create_person -class TestClickhouseCalculateCohort(ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person)): # type: ignore +class TestClickhouseCalculateCohort( + ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person) +): # type: ignore @patch("posthog.tasks.calculate_cohort.insert_cohort_from_insight_filter.delay") def test_create_stickiness_cohort(self, _insert_cohort_from_insight_filter): _create_person(team_id=self.team.pk, distinct_ids=["blabla"]) @@ -198,7 +200,14 @@ def test_create_trends_cohort_arg_test(self, _insert_cohort_from_insight_filter) "insight": "TRENDS", "interval": "day", "properties": json.dumps( - [{"key": "$domain", "value": "app.posthog.com", "operator": "icontains", "type": "event"}] + [ + { + "key": "$domain", + "value": "app.posthog.com", + "operator": "icontains", + "type": "event", + } + ] ), } @@ -242,7 +251,12 @@ def test_create_trends_cohort_arg_test(self, _insert_cohort_from_insight_filter) } ], "properties": [ - {"key": "$domain", "value": "app.posthog.com", "operator": "icontains", "type": "event"} + { + "key": "$domain", + "value": "app.posthog.com", + "operator": "icontains", + "type": "event", + } ], "entity_id": 
"$pageview", "entity_type": "events", @@ -365,7 +379,10 @@ def _create_events(data, event="$pageview"): _create_person( team_id=self.team.pk, distinct_ids=[id], - properties={"name": id, **({"email": "test@posthog.com"} if id == "p1" else {})}, + properties={ + "name": id, + **({"email": "test@posthog.com"} if id == "p1" else {}), + }, ) ) for timestamp in timestamps: diff --git a/ee/tasks/test/test_send_license_usage.py b/ee/tasks/test/test_send_license_usage.py index 8d649fcf7ad38..441179c2c3eb9 100644 --- a/ee/tasks/test/test_send_license_usage.py +++ b/ee/tasks/test/test_send_license_usage.py @@ -6,7 +6,12 @@ from ee.models.license import License from ee.tasks.send_license_usage import send_license_usage from posthog.models.team import Team -from posthog.test.base import APIBaseTest, ClickhouseDestroyTablesMixin, _create_event, flush_persons_and_events +from posthog.test.base import ( + APIBaseTest, + ClickhouseDestroyTablesMixin, + _create_event, + flush_persons_and_events, +) class SendLicenseUsageTest(LicensedTestMixin, ClickhouseDestroyTablesMixin, APIBaseTest): @@ -17,17 +22,42 @@ def test_send_license_usage(self, mock_post, mock_capture): self.license.key = "legacy-key" self.license.save() team2 = Team.objects.create(organization=self.organization) - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) _create_event( event="$$internal_metrics_shouldnt_be_billed", team=self.team, 
distinct_id=1, timestamp="2021-10-09T13:01:01Z", ) - _create_event(event="$pageview", team=team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=team2, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() mockresponse = Mock() @@ -42,7 +72,12 @@ def test_send_license_usage(self, mock_post, mock_capture): mock_capture.assert_called_once_with( self.user.distinct_id, "send license usage data", - {"date": "2021-10-09", "events_count": 3, "license_keys": [self.license.key], "organization_name": "Test"}, + { + "date": "2021-10-09", + "events_count": 3, + "license_keys": [self.license.key], + "organization_name": "Test", + }, groups={"instance": ANY, "organization": str(self.organization.id)}, ) self.assertEqual(License.objects.get().valid_until.isoformat(), "2021-11-10T23:01:00+00:00") @@ -55,17 +90,42 @@ def test_send_license_error(self, mock_post, mock_capture): self.license.save() team2 = Team.objects.create(organization=self.organization) - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) _create_event( event="$$internal_metrics_shouldnt_be_billed", team=self.team, distinct_id=1, 
timestamp="2021-10-09T13:01:01Z", ) - _create_event(event="$pageview", team=team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=team2, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() with self.assertRaises(Exception): send_license_usage() @@ -84,17 +144,42 @@ def test_send_license_usage_already_sent(self, mock_post, mock_capture): self.license.save() team2 = Team.objects.create(organization=self.organization) - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) _create_event( event="$$internal_metrics_shouldnt_be_billed", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z", ) - _create_event(event="$pageview", team=team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=team2, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) mockresponse = Mock() mock_post.return_value = mockresponse mockresponse.ok = False @@ -115,17 +200,42 
@@ def test_send_license_not_found(self, mock_post, mock_capture): self.license.save() team2 = Team.objects.create(organization=self.organization) - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) _create_event( event="$$internal_metrics_shouldnt_be_billed", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z", ) - _create_event(event="$pageview", team=team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=team2, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() flush_persons_and_events() @@ -141,7 +251,13 @@ def test_send_license_not_found(self, mock_post, mock_capture): mock_capture.assert_called_once_with( self.user.distinct_id, "send license usage data error", - {"error": "", "date": "2021-10-09", "organization_name": "Test", "status_code": 404, "events_count": 3}, + { + "error": "", + "date": "2021-10-09", + "organization_name": "Test", + "status_code": 404, + "events_count": 3, + }, groups={"instance": ANY, "organization": str(self.organization.id)}, ) self.assertEqual(License.objects.get().valid_until.isoformat(), "2021-10-10T22:01:00+00:00") @@ -163,11 +279,36 @@ class 
SendLicenseUsageNoLicenseTest(APIBaseTest): @patch("requests.post") def test_no_license(self, mock_post): # Same test, we just don't include the LicensedTestMixin so no license - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() diff --git a/ee/urls.py b/ee/urls.py index 02f6028a1adcd..a3851a2807583 100644 --- a/ee/urls.py +++ b/ee/urls.py @@ -65,7 +65,10 @@ def extend_api_router( ) projects_router.register(r"hooks", hooks.HookViewSet, "project_hooks", ["team_id"]) projects_router.register( - r"explicit_members", explicit_team_member.ExplicitTeamMemberViewSet, "project_explicit_members", ["team_id"] + r"explicit_members", + explicit_team_member.ExplicitTeamMemberViewSet, + "project_explicit_members", + ["team_id"], ) project_dashboards_router.register( r"collaborators", diff --git a/eslint-rules/README.md b/eslint-rules/README.md new file mode 100644 index 0000000000000..9d1d8c0667208 --- /dev/null +++ b/eslint-rules/README.md @@ -0,0 +1,3 @@ +# 
PostHog Custom ESLint rules + +This package contains custom ESLint rules for PostHog's codebase. diff --git a/eslint-rules/index.js b/eslint-rules/index.js new file mode 100644 index 0000000000000..61e49e1b8c393 --- /dev/null +++ b/eslint-rules/index.js @@ -0,0 +1,10 @@ +const { readdirSync } = require('fs') +const { basename } = require('path') + +const ruleFiles = readdirSync('eslint-rules').filter( + (file) => file.endsWith('.js') && file !== 'index.js' && !file.endsWith('test.js') +) + +const rules = Object.fromEntries(ruleFiles.map((file) => [basename(file, '.js'), require('./' + file)])) + +module.exports = { rules } diff --git a/eslint-rules/warn-elements.js b/eslint-rules/warn-elements.js new file mode 100644 index 0000000000000..4013d6090e4a1 --- /dev/null +++ b/eslint-rules/warn-elements.js @@ -0,0 +1,3 @@ +const { rules } = require('eslint-plugin-react') + +module.exports = rules['forbid-elements'] diff --git a/frontend/__snapshots__/components-map--unavailable.png b/frontend/__snapshots__/components-map--unavailable.png new file mode 100644 index 0000000000000..6e49827d9e782 Binary files /dev/null and b/frontend/__snapshots__/components-map--unavailable.png differ diff --git a/frontend/__snapshots__/lemon-ui-icons--shelf-a.png b/frontend/__snapshots__/lemon-ui-icons--shelf-a.png index 9404063d1c5a8..7637d61913028 100644 Binary files a/frontend/__snapshots__/lemon-ui-icons--shelf-a.png and b/frontend/__snapshots__/lemon-ui-icons--shelf-a.png differ diff --git a/frontend/__snapshots__/lemon-ui-icons--shelf-c.png b/frontend/__snapshots__/lemon-ui-icons--shelf-c.png index 73ebbff721b70..cbfeb5aead477 100644 Binary files a/frontend/__snapshots__/lemon-ui-icons--shelf-c.png and b/frontend/__snapshots__/lemon-ui-icons--shelf-c.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png new file mode 100644 index 0000000000000..d3cf126326368 Binary files 
/dev/null and b/frontend/__snapshots__/posthog-3000-navigation--navigation-3000.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png new file mode 100644 index 0000000000000..0f29fe159698f Binary files /dev/null and b/frontend/__snapshots__/posthog-3000-navigation--navigation-base.png differ diff --git a/frontend/__snapshots__/posthog-3000-navigation--navigation.png b/frontend/__snapshots__/posthog-3000-navigation--navigation.png new file mode 100644 index 0000000000000..6cbf8a3a5b034 Binary files /dev/null and b/frontend/__snapshots__/posthog-3000-navigation--navigation.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--dashboards.png b/frontend/__snapshots__/posthog-3000-sidebar--dashboards.png index 23fb3a2fe4f02..692ac2b0b1876 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--dashboards.png and b/frontend/__snapshots__/posthog-3000-sidebar--dashboards.png differ diff --git a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags.png b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags.png index 23fb3a2fe4f02..4e20bd742624a 100644 Binary files a/frontend/__snapshots__/posthog-3000-sidebar--feature-flags.png and b/frontend/__snapshots__/posthog-3000-sidebar--feature-flags.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png index bc79f804ca560..6db5ed04bf68a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png index 
118d11b614a3f..8d7d747f8990a 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-left-to-right-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png index 5f8f362dc80f4..192aee1058894 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-breakdown-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png index 73a6ac1fbc6d5..1158f400a0d85 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--funnel-top-to-bottom-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png index 25086a8cbda7f..aa0201c92e8b6 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit--webkit.png differ diff --git a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png index 1c01944a7c95c..df710f4dd46da 100644 Binary files a/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png and b/frontend/__snapshots__/scenes-app-insights--user-paths-edit.png differ diff --git a/frontend/__snapshots__/scenes-app-notebooks--headings.png b/frontend/__snapshots__/scenes-app-notebooks--headings.png index c06f3964ea328..be3fcafaa9193 100644 Binary files 
a/frontend/__snapshots__/scenes-app-notebooks--headings.png and b/frontend/__snapshots__/scenes-app-notebooks--headings.png differ diff --git a/frontend/src/globals.d.ts b/frontend/src/globals.d.ts index 17bc65680725a..df0e0e81664ea 100644 --- a/frontend/src/globals.d.ts +++ b/frontend/src/globals.d.ts @@ -6,6 +6,7 @@ declare global { JS_POSTHOG_API_KEY?: string JS_POSTHOG_HOST?: string JS_POSTHOG_SELF_CAPTURE?: boolean + JS_MAPLIBRE_STYLE_URL?: string JS_CAPTURE_TIME_TO_SEE_DATA?: boolean JS_KEA_VERBOSE_LOGGING?: boolean posthog?: posthog diff --git a/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.stories.tsx b/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.stories.tsx index 53d2318fc212f..45025c5d8132c 100644 --- a/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.stories.tsx +++ b/frontend/src/layout/FeaturePreviews/FeaturePreviewsModal.stories.tsx @@ -1,6 +1,6 @@ import { Meta, StoryFn, StoryObj } from '@storybook/react' import { FeaturePreviewsModal as FeaturePreviewsModalComponent } from './FeaturePreviewsModal' -import { useFeatureFlags, useStorybookMocks } from '~/mocks/browser' +import { setFeatureFlags, useStorybookMocks } from '~/mocks/browser' import { EarlyAccessFeature } from 'posthog-js' import { CONSTRAINED_PREVIEWS } from './featurePreviewsLogic' import { FeatureFlagKey } from 'lib/constants' @@ -28,7 +28,7 @@ const Template: StoryFn = ({ earlyAccessFeatures, enabledFeatureFlag 'https://app.posthog.com/api/early_access_features/': { earlyAccessFeatures }, }, }) - useFeatureFlags(enabledFeatureFlags) + setFeatureFlags(enabledFeatureFlags) return (
diff --git a/frontend/src/layout/navigation-3000/Navigation.stories.tsx b/frontend/src/layout/navigation-3000/Navigation.stories.tsx index a46783d69faba..e843af7e75f5f 100644 --- a/frontend/src/layout/navigation-3000/Navigation.stories.tsx +++ b/frontend/src/layout/navigation-3000/Navigation.stories.tsx @@ -1,13 +1,10 @@ import { Meta } from '@storybook/react' -import { mswDecorator } from '~/mocks/browser' +import { mswDecorator, setFeatureFlags } from '~/mocks/browser' import { useEffect } from 'react' import { router } from 'kea-router' import { urls } from 'scenes/urls' import { App } from 'scenes/App' import { EMPTY_PAGINATED_RESPONSE } from '~/mocks/handlers' -import { useActions } from 'kea' -import { themeLogic } from './themeLogic' -import { with3000 } from 'storybook/decorators/with3000' const meta: Meta = { title: 'PostHog 3000/Navigation', @@ -21,7 +18,6 @@ const meta: Meta = { '/api/projects/:team_id/session_recordings/': EMPTY_PAGINATED_RESPONSE, }, }), - with3000, ], parameters: { layout: 'fullscreen', @@ -30,21 +26,20 @@ const meta: Meta = { }, } export default meta -export function LightMode(): JSX.Element { - const { overrideTheme } = useActions(themeLogic) + +export function NavigationBase(): JSX.Element { + setFeatureFlags(['posthog-3000']) useEffect(() => { router.actions.push(urls.projectHomepage()) - overrideTheme(false) }, []) return } -export function DarkMode(): JSX.Element { - const { overrideTheme } = useActions(themeLogic) +export function Navigation3000(): JSX.Element { + setFeatureFlags(['posthog-3000', 'posthog-3000-nav']) useEffect(() => { router.actions.push(urls.projectHomepage()) - overrideTheme(true) }, []) return diff --git a/frontend/src/layout/navigation-3000/components/Sidebar.stories.tsx b/frontend/src/layout/navigation-3000/components/Sidebar.stories.tsx index 4176e5a156a15..0927f46babe33 100644 --- a/frontend/src/layout/navigation-3000/components/Sidebar.stories.tsx +++ 
b/frontend/src/layout/navigation-3000/components/Sidebar.stories.tsx @@ -7,8 +7,8 @@ import { navigation3000Logic } from '../navigationLogic' import { Sidebar } from './Sidebar' import featureFlagsJson from '../../../scenes/feature-flags/__mocks__/feature_flags.json' import dashboardsJson from '../../../scenes/dashboard/__mocks__/dashboards.json' -import { with3000 } from 'storybook/decorators/with3000' import { SidebarNavbarItem } from '../types' +import { setFeatureFlags } from '~/mocks/browser' const meta: Meta = { title: 'PostHog 3000/Sidebar', @@ -17,7 +17,6 @@ const meta: Meta = { layout: 'fullscreen', viewMode: 'story', }, - decorators: [with3000], } export default meta /** featureFlagsJson * 6 to fill the sidebar up more. */ @@ -33,6 +32,7 @@ const multipliedFeatureFlagsJson = { } export function Dashboards(): JSX.Element { + setFeatureFlags(['posthog-3000', 'posthog-3000-nav']) useStorybookMocks({ get: { '/api/projects/:team_id/dashboards/': dashboardsJson, @@ -52,6 +52,7 @@ export function Dashboards(): JSX.Element { } export function FeatureFlags(): JSX.Element { + setFeatureFlags(['posthog-3000', 'posthog-3000-nav']) useStorybookMocks({ get: { '/api/projects/:team_id/feature_flags/': multipliedFeatureFlagsJson, diff --git a/frontend/src/lib/components/Errors/ErrorDisplay.tsx b/frontend/src/lib/components/Errors/ErrorDisplay.tsx index 46a7e4b74a05c..4c14a6e44412a 100644 --- a/frontend/src/lib/components/Errors/ErrorDisplay.tsx +++ b/frontend/src/lib/components/Errors/ErrorDisplay.tsx @@ -2,8 +2,8 @@ import { EventType, RecordingEventType } from '~/types' import { LemonTag } from 'lib/lemon-ui/LemonTag/LemonTag' import { IconFlag } from 'lib/lemon-ui/icons' import clsx from 'clsx' -import posthog from 'posthog-js' import { Link } from 'lib/lemon-ui/Link' +import posthog from 'posthog-js' interface StackFrame { filename: string @@ -43,7 +43,7 @@ function StackTrace({ rawTrace }: { rawTrace: string }): JSX.Element | null { ) } catch (e: any) { //very meta - 
posthog.captureException(e, { tag: 'error-display-stack-trace' }) + posthog.capture('Cannot parse stack trace in Exception event', { tag: 'error-display-stack-trace', e }) return Error parsing stack trace } } diff --git a/frontend/src/lib/components/Map/Map.stories.tsx b/frontend/src/lib/components/Map/Map.stories.tsx new file mode 100644 index 0000000000000..09576ec0ed50c --- /dev/null +++ b/frontend/src/lib/components/Map/Map.stories.tsx @@ -0,0 +1,31 @@ +import type { Meta, StoryObj } from '@storybook/react' +import { Marker } from 'maplibre-gl' + +import { Map, MapComponent } from './Map' + +const meta: Meta = { + title: 'Components/Map', + component: Map, + tags: ['autodocs'], +} +type Story = StoryObj + +const coordinates: [number, number] = [0.119167, 52.205276] + +export const Unavailable: Story = {} + +export const Basic: Story = { + render: (args) => ( + + ), + args: { + center: coordinates, + markers: [new Marker({ color: 'var(--primary)' }).setLngLat(coordinates)], + className: 'h-60', + }, +} + +export default meta diff --git a/frontend/src/lib/components/Map/Map.tsx b/frontend/src/lib/components/Map/Map.tsx new file mode 100644 index 0000000000000..0b72d136d04d4 --- /dev/null +++ b/frontend/src/lib/components/Map/Map.tsx @@ -0,0 +1,64 @@ +import { useEffect, useRef } from 'react' +import { Map as RawMap, Marker } from 'maplibre-gl' +import useResizeObserver from 'use-resize-observer' + +import 'maplibre-gl/dist/maplibre-gl.css' + +/** Latitude and longtitude in degrees (+lat is east, -lat is west, +lon is south, -lon is north). */ +export interface MapProps { + /** Coordinates to center the map on by default. */ + center: [number, number] + /** Markers to show. */ + markers?: Marker[] + /** Map container class names. */ + className?: string + /** The map's MapLibre style. This must be a JSON object conforming to the schema described in the MapLibre Style Specification, or a URL to such JSON. 
*/ + mapLibreStyleUrl: string +} + +export function Map({ className, ...rest }: Omit): JSX.Element { + if (!window.JS_MAPLIBRE_STYLE_URL) { + return ( +
+

Map unavailable

+

+ The MAPLIBRE_STYLE_URL setting is not defined. Please configure this setting with a + valid MapLibre Style URL to display maps. +

+
+ ) + } + + return +} + +export function MapComponent({ center, markers, className, mapLibreStyleUrl }: MapProps): JSX.Element { + const mapContainer = useRef(null) + const map = useRef(null) + + useEffect(() => { + map.current = new RawMap({ + container: mapContainer.current as HTMLElement, + style: mapLibreStyleUrl, + center, + zoom: 4, + maxZoom: 10, + }) + if (markers) { + for (const marker of markers) { + marker.addTo(map.current) + } + } + }, []) + + useResizeObserver({ + ref: mapContainer, + onResize: () => { + if (map.current) { + map.current.resize() + } + }, + }) + + return
+} diff --git a/frontend/src/lib/components/PayGateMini/PayGateMini.tsx b/frontend/src/lib/components/PayGateMini/PayGateMini.tsx index 477fda2461dca..6c02869d74613 100644 --- a/frontend/src/lib/components/PayGateMini/PayGateMini.tsx +++ b/frontend/src/lib/components/PayGateMini/PayGateMini.tsx @@ -120,12 +120,12 @@ export function PayGateMini({
{featureSummary.icon || }
{featureSummary.description}
- Subscribe to gain {featureSummary.umbrella}. + Subscribe to gain {featureSummary.umbrella}. {featureSummary.docsHref && ( <> {' '} - Learn more in PostHog Docs. + Learn more in PostHog Docs. )} diff --git a/frontend/src/lib/components/Support/supportLogic.ts b/frontend/src/lib/components/Support/supportLogic.ts index ea9b146e7d777..4a9016acce670 100644 --- a/frontend/src/lib/components/Support/supportLogic.ts +++ b/frontend/src/lib/components/Support/supportLogic.ts @@ -57,6 +57,7 @@ export const TARGET_AREA_TO_NAME = { session_replay: 'Session Replay (Recordings)', toolbar: 'Toolbar & heatmaps', surveys: 'Surveys', + web_analytics: 'Web Analytics', } export const SUPPORT_KIND_TO_SUBJECT = { @@ -85,6 +86,7 @@ export const URL_PATH_TO_TARGET_AREA: Record = toolbar: 'session_replay', warehouse: 'data_warehouse', surveys: 'surveys', + web: 'web_analytics', } export function getURLPathToTargetArea(pathname: string): SupportTicketTargetArea | null { diff --git a/frontend/src/lib/constants.tsx b/frontend/src/lib/constants.tsx index 98d4142baa108..6343b341614ef 100644 --- a/frontend/src/lib/constants.tsx +++ b/frontend/src/lib/constants.tsx @@ -165,7 +165,9 @@ export const FEATURE_FLAGS = { SURVEYS_PAYGATES: 'surveys-paygates', CONSOLE_RECORDING_SEARCH: 'console-recording-search', // owner: #team-monitoring PERSONS_HOGQL_QUERY: 'persons-hogql-query', // owner: @mariusandra + NOTEBOOK_CANVASES: 'notebook-canvases', // owner: #team-monitoring SESSION_RECORDING_SAMPLING: 'session-recording-sampling', // owner: #team-monitoring + PERSON_FEED_CANVAS: 'person-feed-canvas', // owner: #project-canvas } as const export type FeatureFlagKey = (typeof FEATURE_FLAGS)[keyof typeof FEATURE_FLAGS] diff --git a/frontend/src/lib/lemon-ui/LemonCalendarRange/LemonCalendarRangeInline.tsx b/frontend/src/lib/lemon-ui/LemonCalendarRange/LemonCalendarRangeInline.tsx index 698d891ac33cf..8a25fd4d1fae1 100644 --- a/frontend/src/lib/lemon-ui/LemonCalendarRange/LemonCalendarRangeInline.tsx +++ 
b/frontend/src/lib/lemon-ui/LemonCalendarRange/LemonCalendarRangeInline.tsx @@ -33,6 +33,7 @@ export function LemonCalendarRangeInline({ // How many months fit on the screen, capped between 1..2 function getMonthCount(): number { const width = + // eslint-disable-next-line valid-typeof typeof window === undefined ? WIDTH_OF_ONE_CALENDAR_MONTH * CALENDARS_IF_NO_WINDOW : window.innerWidth return Math.min(Math.max(1, Math.floor(width / WIDTH_OF_ONE_CALENDAR_MONTH)), 2) } diff --git a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx index 44e0a952e9646..c00145d40f449 100644 --- a/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx +++ b/frontend/src/lib/lemon-ui/LemonMenu/LemonMenu.tsx @@ -98,7 +98,7 @@ export function LemonMenu({ ) const _onVisibilityChange = useCallback( - (visible) => { + (visible: boolean) => { onVisibilityChange?.(visible) if (visible && activeItemIndex && activeItemIndex > -1) { // Scroll the active item into view once the menu is open (i.e. in the next tick) @@ -256,7 +256,7 @@ const LemonMenuItemButton: FunctionComponent - {label} + {label as string | JSX.Element} {keyboardShortcut && (
{/* Show the keyboard shortcut on the right */} diff --git a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx index 8b5c6ab2cdf70..5686b6af19412 100644 --- a/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx +++ b/frontend/src/lib/lemon-ui/LemonSelect/LemonSelect.tsx @@ -93,7 +93,7 @@ export interface LemonSelectPropsNonClearable extends LemonSelectPropsBase export type LemonSelectProps = LemonSelectPropsClearable | LemonSelectPropsNonClearable -export function LemonSelect({ +export function LemonSelect({ value = null, onChange, onSelect, diff --git a/frontend/src/lib/lemon-ui/LemonTable/TableRow.tsx b/frontend/src/lib/lemon-ui/LemonTable/TableRow.tsx index e67c574711ec9..6071cc7b08183 100644 --- a/frontend/src/lib/lemon-ui/LemonTable/TableRow.tsx +++ b/frontend/src/lib/lemon-ui/LemonTable/TableRow.tsx @@ -132,6 +132,8 @@ function TableRowRaw>({ // of a class indicating that scrollability to `table` caused the component to lag due to unneded rerendering of rows. 
export const TableRow = React.memo(TableRowRaw) as typeof TableRowRaw -function isTableCellRepresentation(contents: React.ReactNode): contents is TableCellRepresentation { +function isTableCellRepresentation( + contents: React.ReactNode | TableCellRepresentation +): contents is TableCellRepresentation { return !!contents && typeof contents === 'object' && !React.isValidElement(contents) } diff --git a/frontend/src/lib/lemon-ui/hooks.ts b/frontend/src/lib/lemon-ui/hooks.ts index ba4afca720d23..32625c2d499bd 100644 --- a/frontend/src/lib/lemon-ui/hooks.ts +++ b/frontend/src/lib/lemon-ui/hooks.ts @@ -6,7 +6,7 @@ import { useLayoutEffect, useRef, useState } from 'react' * @private */ export function useSliderPositioning( - currentValue: string | number | null | undefined, + currentValue: React.Key | null | undefined, transitionMs: number ): { containerRef: React.RefObject diff --git a/frontend/src/lib/lemon-ui/icons/icons.tsx b/frontend/src/lib/lemon-ui/icons/icons.tsx index 55115bab2944b..8d5048e4aad10 100644 --- a/frontend/src/lib/lemon-ui/icons/icons.tsx +++ b/frontend/src/lib/lemon-ui/icons/icons.tsx @@ -2472,3 +2472,25 @@ export function IconNotebook(props: LemonIconProps): JSX.Element { ) } + +export function IconCode(props: LemonIconProps): JSX.Element { + return ( + + + + ) +} + +export function IconAdsClick(props: LemonIconProps): JSX.Element { + return ( + + + + ) +} diff --git a/frontend/src/mocks/browser.tsx b/frontend/src/mocks/browser.tsx index adf00a8cc8125..6baf8552fa047 100644 --- a/frontend/src/mocks/browser.tsx +++ b/frontend/src/mocks/browser.tsx @@ -30,6 +30,6 @@ export const mswDecorator = (mocks: Mocks): DecoratorFunction => { } } -export const useFeatureFlags = (featureFlags: string[]): void => { +export const setFeatureFlags = (featureFlags: string[]): void => { ;(window as any).POSTHOG_APP_CONTEXT.persisted_feature_flags = featureFlags } diff --git a/frontend/src/queries/nodes/DataTable/renderColumn.tsx 
b/frontend/src/queries/nodes/DataTable/renderColumn.tsx index e24f9ce1cef80..7333f6f42fcca 100644 --- a/frontend/src/queries/nodes/DataTable/renderColumn.tsx +++ b/frontend/src/queries/nodes/DataTable/renderColumn.tsx @@ -5,7 +5,7 @@ import { Link } from 'lib/lemon-ui/Link' import { TZLabel } from 'lib/components/TZLabel' import { Property } from 'lib/components/Property' import { urls } from 'scenes/urls' -import { PersonDisplay } from 'scenes/persons/PersonDisplay' +import { PersonDisplay, PersonDisplayProps } from 'scenes/persons/PersonDisplay' import { DataTableNode, EventsQueryPersonColumn, HasPropertiesNode } from '~/queries/schema' import { QueryContext } from '~/queries/types' @@ -204,27 +204,29 @@ export function renderColumn( ) } return - } else if (key === 'person' && isEventsQuery(query.source)) { - const personRecord = value as EventsQueryPersonColumn - return personRecord.distinct_id ? ( - - ) : ( - - ) - } else if (key === 'person' && isPersonsNode(query.source)) { + } else if (key === 'person') { const personRecord = record as PersonType - return ( - - - - ) - } else if (key === 'person' && isPersonsQuery(query.source)) { - const personRecord = value as PersonType - return ( - - - - ) + + const displayProps: PersonDisplayProps = { + withIcon: true, + person: record as PersonType, + noPopover: true, + } + + if (isEventsQuery(query.source)) { + displayProps.person = value.distinct_id ? (value as EventsQueryPersonColumn) : value + displayProps.noPopover = false // If we are in an events list, the popover experience is better + } + + if (isPersonsNode(query.source) && personRecord.distinct_ids) { + displayProps.href = urls.personByDistinctId(personRecord.distinct_ids[0]) + } + + if (isPersonsQuery(query.source)) { + displayProps.href = urls.personByUUID(personRecord.id ?? 
'-') + } + + return } else if (key === 'person.$delete' && (isPersonsNode(query.source) || isPersonsQuery(query.source))) { const personRecord = record as PersonType return diff --git a/frontend/src/scenes/appScenes.ts b/frontend/src/scenes/appScenes.ts index 93c78f7a96e69..494f7668f4bfe 100644 --- a/frontend/src/scenes/appScenes.ts +++ b/frontend/src/scenes/appScenes.ts @@ -79,6 +79,7 @@ export const appScenes: Record any> = { [Scene.Feedback]: () => import('./feedback/Feedback'), [Scene.Notebooks]: () => import('./notebooks/NotebooksScene'), [Scene.Notebook]: () => import('./notebooks/NotebookScene'), + [Scene.Canvas]: () => import('./notebooks/NotebookCanvasScene'), [Scene.Products]: () => import('./products/Products'), [Scene.Onboarding]: () => import('./onboarding/Onboarding'), } diff --git a/frontend/src/scenes/billing/BillingLimitInput.tsx b/frontend/src/scenes/billing/BillingLimitInput.tsx index f644f22dd8f86..6f1dd652a4acb 100644 --- a/frontend/src/scenes/billing/BillingLimitInput.tsx +++ b/frontend/src/scenes/billing/BillingLimitInput.tsx @@ -25,12 +25,14 @@ export const BillingLimitInput = ({ product }: { product: BillingProductV2Type } if (value === undefined) { return actuallyUpdateLimit() } - const productAndAddonTiers: BillingV2TierType[][] = [ - product.tiers, - ...product.addons - ?.filter((addon: BillingProductV2AddonType) => addon.subscribed) - ?.map((addon: BillingProductV2AddonType) => addon.tiers), - ].filter(Boolean) as BillingV2TierType[][] + + const addonTiers = product.addons + ?.filter((addon: BillingProductV2AddonType) => addon.subscribed) + ?.map((addon: BillingProductV2AddonType) => addon.tiers) + + const productAndAddonTiers: BillingV2TierType[][] = [product.tiers, ...addonTiers].filter( + Boolean + ) as BillingV2TierType[][] const newAmountAsUsage = product.tiers ? 
convertAmountToUsage(`${value}`, productAndAddonTiers, billing?.discount_percent) diff --git a/frontend/src/scenes/billing/billingProductLogic.ts b/frontend/src/scenes/billing/billingProductLogic.ts index 7b26b0562f68f..07641dbb725c6 100644 --- a/frontend/src/scenes/billing/billingProductLogic.ts +++ b/frontend/src/scenes/billing/billingProductLogic.ts @@ -101,12 +101,12 @@ export const billingProductLogic = kea([ (billing, product, isEditingBillingLimit, billingLimitInput, customLimitUsd) => { // cast the product as a product, not an addon, to avoid TS errors. This is fine since we're just getting the tiers. product = product as BillingProductV2Type - const productAndAddonTiers: BillingV2TierType[][] = [ - product.tiers, - ...product.addons - ?.filter((addon: BillingProductV2AddonType) => addon.subscribed) - ?.map((addon: BillingProductV2AddonType) => addon.tiers), - ].filter(Boolean) as BillingV2TierType[][] + const addonTiers = product.addons + ?.filter((addon: BillingProductV2AddonType) => addon.subscribed) + ?.map((addon: BillingProductV2AddonType) => addon.tiers) + const productAndAddonTiers: BillingV2TierType[][] = [product.tiers, ...addonTiers].filter( + Boolean + ) as BillingV2TierType[][] return product.tiers ? isEditingBillingLimit ? 
convertAmountToUsage(`${billingLimitInput}`, productAndAddonTiers, billing?.discount_percent) diff --git a/frontend/src/scenes/experiments/experimentLogic.tsx b/frontend/src/scenes/experiments/experimentLogic.tsx index 9e9068689e1fc..b75e1c68251a0 100644 --- a/frontend/src/scenes/experiments/experimentLogic.tsx +++ b/frontend/src/scenes/experiments/experimentLogic.tsx @@ -802,7 +802,7 @@ export const experimentLogic = kea([ let index = -1 if (insightType === InsightType.FUNNELS) { // Funnel Insight is displayed in order of decreasing count - index = ([...experimentResults?.insight] as FunnelStep[][]) + index = ([...experimentResults.insight] as FunnelStep[][]) .sort((a, b) => b[0]?.count - a[0]?.count) .findIndex( (variantFunnel: FunnelStep[]) => variantFunnel[0]?.breakdown_value?.[0] === variant diff --git a/frontend/src/scenes/feature-flags/featureFlagLogic.ts b/frontend/src/scenes/feature-flags/featureFlagLogic.ts index 0ea7e1154bbcf..172ca75db8329 100644 --- a/frontend/src/scenes/feature-flags/featureFlagLogic.ts +++ b/frontend/src/scenes/feature-flags/featureFlagLogic.ts @@ -266,7 +266,10 @@ export const featureFlagLogic = kea([ if (!state) { return state } - const groups = [...state?.filters.groups, { properties: [], rollout_percentage: 0, variant: null }] + const groups = [ + ...(state?.filters?.groups || []), + { properties: [], rollout_percentage: 0, variant: null }, + ] return { ...state, filters: { ...state.filters, groups } } }, addRollbackCondition: (state) => { @@ -291,7 +294,7 @@ export const featureFlagLogic = kea([ return state } - const groups = [...state?.filters.groups] + const groups = [...(state?.filters?.groups || [])] if (newRolloutPercentage !== undefined) { groups[index] = { ...groups[index], rollout_percentage: newRolloutPercentage } } diff --git a/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx b/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx index 302e148caf4ef..7326a262550eb 100644 --- 
a/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx +++ b/frontend/src/scenes/insights/__mocks__/createInsightScene.tsx @@ -1,5 +1,5 @@ import { InsightModel } from '~/types' -import { useFeatureFlags, useStorybookMocks } from '~/mocks/browser' +import { setFeatureFlags, useStorybookMocks } from '~/mocks/browser' import { useEffect } from 'react' import { router } from 'kea-router' import { App } from 'scenes/App' @@ -27,7 +27,7 @@ export function createInsightStory( ], }, }) - useFeatureFlags([FEATURE_FLAGS.RETENTION_BREAKDOWN]) + setFeatureFlags([FEATURE_FLAGS.RETENTION_BREAKDOWN]) useEffect(() => { router.actions.push(`/insights/${insight.short_id}${count}${mode === 'edit' ? '/edit' : ''}`) diff --git a/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx b/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx index 52a66b633af6d..f50e1290734a2 100644 --- a/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx +++ b/frontend/src/scenes/notebooks/Marks/NotebookMarkLink.tsx @@ -59,5 +59,10 @@ export const NotebookMarkLink = Mark.create({ }) const isPostHogLink = (href: string): boolean => { - return new URL(href, window.location.origin).origin === window.location.origin + try { + const url = new URL(href, window.location.origin) + return url.origin === window.location.origin + } catch { + return false + } } diff --git a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.scss b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.scss index 58572fec9bd99..8a96021abd209 100644 --- a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.scss +++ b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.scss @@ -4,7 +4,6 @@ --border-color: var(--border); transform: translate3d(0, 0, 0); - margin: 0.65rem 0px 0.35rem 0px; .NotebookNode__box { transform: translate3d(0, 0, 0); @@ -12,6 +11,7 @@ border-radius: var(--radius); background-color: var(--bg-light); transition: border 150ms linear, margin-bottom 150ms linear; + overflow: hidden; .NotebookNode__meta { display: 
flex; @@ -33,24 +33,24 @@ } } - .NotebookNode__actions { + .NotebookNode__gap { display: flex; gap: 0.25rem; overflow: hidden; - transition: all 150ms linear 1000ms; opacity: 0; - height: 0; - margin-top: 0; + height: 1rem; + align-items: center; } - &:hover, - &--selected { - .NotebookNode__actions { - opacity: 1; - height: 2rem; - margin-top: 0.5rem; - transition: all 150ms linear; + &--has-actions { + &:hover, + &--selected { + .NotebookNode__gap { + opacity: 1; + height: 2.5rem; + transition: all 150ms linear; + } } } diff --git a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx index da6c679877f03..fea1b437f3d21 100644 --- a/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NodeWrapper.tsx @@ -8,7 +8,7 @@ import { NodeViewProps, getExtensionField, } from '@tiptap/react' -import { memo, useCallback, useRef } from 'react' +import { memo, useCallback, useEffect, useRef } from 'react' import clsx from 'clsx' import { IconClose, @@ -22,10 +22,10 @@ import { import { LemonButton } from '@posthog/lemon-ui' import './NodeWrapper.scss' import { LemonSkeleton } from 'lib/lemon-ui/LemonSkeleton' -import { BindLogic, useActions, useMountedLogic, useValues } from 'kea' +import { BindLogic, BuiltLogic, useActions, useMountedLogic, useValues } from 'kea' import { notebookLogic } from '../Notebook/notebookLogic' import { useInView } from 'react-intersection-observer' -import { NotebookNodeType } from '~/types' +import { NotebookNodeResource } from '~/types' import { ErrorBoundary } from '~/layout/ErrorBoundary' import { NotebookNodeContext, NotebookNodeLogicProps, notebookNodeLogic } from './notebookNodeLogic' import { posthogNodePasteRule, useSyncedAttributes } from './utils' @@ -37,45 +37,38 @@ import { } from '../Notebook/utils' import { useWhyDidIRender } from 'lib/hooks/useWhyDidIRender' import { NotebookNodeTitle } from './components/NotebookNodeTitle' +import { 
notebookNodeLogicType } from './notebookNodeLogicType' + +// TODO: fix the typing of string to NotebookNodeType +const KNOWN_NODES: Record> = {} + +type NodeWrapperProps = Omit & + NotebookNodeProps & { + Component: (props: NotebookNodeProps) => JSX.Element | null + + // View only props + href?: string | ((attributes: NotebookNodeAttributes) => string | undefined) + expandable?: boolean + selected?: boolean + heightEstimate?: number | string + minHeight?: number | string + /** If true the metadata area will only show when hovered if in editing mode */ + autoHideMetadata?: boolean + /** Expand the node if the component is clicked */ + expandOnClick?: boolean + } -export interface NodeWrapperProps { - nodeType: NotebookNodeType - Component: (props: NotebookNodeProps) => JSX.Element | null - - // Meta properties - these should never be too advanced - more advanced should be done via updateAttributes in the component - titlePlaceholder: string - href?: string | ((attributes: NotebookNodeAttributes) => string | undefined) - - // Sizing - expandable?: boolean - startExpanded?: boolean - resizeable?: boolean | ((attributes: CustomNotebookNodeAttributes) => boolean) - heightEstimate?: number | string - minHeight?: number | string - /** If true the metadata area will only show when hovered if in editing mode */ - autoHideMetadata?: boolean - /** Expand the node if the component is clicked */ - expandOnClick?: boolean - settings?: NotebookNodeSettings -} - -function NodeWrapper( - props: NodeWrapperProps & NotebookNodeProps & Omit -): JSX.Element { +function NodeWrapper(props: NodeWrapperProps): JSX.Element { const { - titlePlaceholder, nodeType, Component, selected, href, heightEstimate = '4rem', - resizeable: resizeableOrGenerator = true, - startExpanded = false, expandable = true, expandOnClick = true, autoHideMetadata = false, minHeight, - node, getPos, attributes, updateAttributes, @@ -85,27 +78,23 @@ function NodeWrapper( useWhyDidIRender('NodeWrapper.props', props) 
const mountedNotebookLogic = useMountedLogic(notebookLogic) - const { isEditable, editingNodeId } = useValues(notebookLogic) - const { setTextSelection } = useActions(notebookLogic) + const { isEditable, editingNodeId } = useValues(mountedNotebookLogic) + const { unregisterNodeLogic } = useActions(notebookLogic) - // nodeId can start null, but should then immediately be generated - const nodeId = attributes.nodeId - const nodeLogicProps: NotebookNodeLogicProps = { - node, - nodeType, - attributes, - updateAttributes, - nodeId, + const logicProps: NotebookNodeLogicProps = { + ...props, notebookLogic: mountedNotebookLogic, - getPos, - resizeable: resizeableOrGenerator, - settings, - startExpanded, - titlePlaceholder, } - const nodeLogic = useMountedLogic(notebookNodeLogic(nodeLogicProps)) - const { resizeable, expanded, actions } = useValues(nodeLogic) - const { setExpanded, deleteNode, toggleEditing } = useActions(nodeLogic) + + // nodeId can start null, but should then immediately be generated + const nodeLogic = useMountedLogic(notebookNodeLogic(logicProps)) + const { resizeable, expanded, actions, nodeId } = useValues(nodeLogic) + const { setExpanded, deleteNode, toggleEditing, insertOrSelectNextLine } = useActions(nodeLogic) + + useEffect(() => { + // TRICKY: child nodes mount the parent logic so we need to control the mounting / unmounting directly in this component + return () => unregisterNodeLogic(nodeId) + }, []) useWhyDidIRender('NodeWrapper.logicProps', { resizeable, @@ -142,20 +131,29 @@ function NodeWrapper( window.addEventListener('mouseup', onResizedEnd) }, [resizeable, updateAttributes]) + const onActionsAreaClick = (): void => { + // Clicking in the area of the actions without selecting a specific action likely indicates the user wants to + // add new content below. 
If we are in editing mode, we should select the next line if there is one, otherwise + insertOrSelectNextLine() + // setTextSelection(getPos() + 1) + } + const parsedHref = typeof href === 'function' ? href(attributes) : href // Element is resizable if resizable is set to true. If expandable is set to true then is is only resizable if expanded is true const isResizeable = resizeable && (!expandable || expanded) + const isDraggable = !!(isEditable && getPos) return ( - +
@@ -172,7 +170,7 @@ function NodeWrapper( <>
- {isEditable && ( + {isDraggable && ( )} @@ -229,29 +227,31 @@ function NodeWrapper( )}
- {isEditable && actions.length ? ( -
setTextSelection(getPos() + 1)} - > - {actions.map((x, i) => ( - } - onClick={(e) => { - e.stopPropagation() - x.onClick() - }} - > - {x.text} - - ))} -
- ) : null} +
onActionsAreaClick()} + > + {getPos && isEditable && actions.length ? ( + <> + {actions.map((x, i) => ( + } + onClick={(e) => { + e.stopPropagation() + x.onClick() + }} + > + {x.text} + + ))} + + ) : null} +
@@ -259,10 +259,12 @@ function NodeWrapper( ) } -const MemoizedNodeWrapper = memo(NodeWrapper) as typeof NodeWrapper +export const MemoizedNodeWrapper = memo(NodeWrapper) as typeof NodeWrapper -export type CreatePostHogWidgetNodeOptions = NodeWrapperProps & { - nodeType: NotebookNodeType +export type CreatePostHogWidgetNodeOptions = Omit< + NodeWrapperProps, + 'updateAttributes' +> & { Component: (props: NotebookNodeProps) => JSX.Element | null pasteOptions?: { find: string @@ -273,13 +275,13 @@ export type CreatePostHogWidgetNodeOptions) => string } -export function createPostHogWidgetNode({ - Component, - pasteOptions, - attributes, - serializedText, - ...wrapperProps -}: CreatePostHogWidgetNodeOptions): Node { +export function createPostHogWidgetNode( + options: CreatePostHogWidgetNodeOptions +): Node { + const { Component, pasteOptions, attributes, serializedText, ...wrapperProps } = options + + KNOWN_NODES[wrapperProps.nodeType] = options + // NOTE: We use NodeViewProps here as we convert them to NotebookNodeProps const WrappedComponent = (props: NodeViewProps): JSX.Element => { useWhyDidIRender('NodeWrapper(WrappedComponent)', props) @@ -330,6 +332,7 @@ export function createPostHogWidgetNode( default: null, }, __init: { default: null }, + children: {}, ...attributes, } }, @@ -363,3 +366,36 @@ export function createPostHogWidgetNode( }, }) } + +export const NotebookNodeChildRenderer = ({ + nodeLogic, + content, +}: { + nodeLogic: BuiltLogic + content: NotebookNodeResource +}): JSX.Element => { + const options = KNOWN_NODES[content.type] + + // eslint-disable-next-line no-console + console.log(nodeLogic) + // TODO: Respect attr changes + + // TODO: Allow deletion + + return ( + { + // eslint-disable-next-line no-console + console.log('updated called (TODO)', newAttrs) + }} + selected={false} + /> + ) + // return +} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeMap.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeMap.tsx new file mode 100644 
index 0000000000000..f0e5ef1931c7d --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeMap.tsx @@ -0,0 +1,64 @@ +import { Marker } from 'maplibre-gl' + +import { NotebookNodeType } from '~/types' +import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' +import { personLogic } from 'scenes/persons/personLogic' +import { useValues } from 'kea' +import { LemonSkeleton } from '@posthog/lemon-ui' +import { NotFound } from 'lib/components/NotFound' +import { Map } from '../../../lib/components/Map/Map' +import { notebookNodeLogic } from './notebookNodeLogic' +import { NotebookNodeProps } from 'scenes/notebooks/Notebook/utils' +import { NotebookNodeEmptyState } from './components/NotebookNodeEmptyState' + +const Component = ({ attributes }: NotebookNodeProps): JSX.Element | null => { + const { id } = attributes + const { expanded } = useValues(notebookNodeLogic) + + const logic = personLogic({ id }) + const { person, personLoading } = useValues(logic) + + if (personLoading) { + return + } else if (!person) { + return + } + + if (!expanded) { + return null + } + + const longtitude = person?.properties?.['$geoip_longitude'] + const latitude = person?.properties?.['$geoip_latitude'] + const personCoordinates: [number, number] | null = + !isNaN(longtitude) && !isNaN(latitude) ? 
[longtitude, latitude] : null + + if (!personCoordinates) { + return + } + + return ( + + ) +} + +type NotebookNodeMapAttributes = { + id: string +} + +export const NotebookNodeMap = createPostHogWidgetNode({ + nodeType: NotebookNodeType.Map, + titlePlaceholder: 'Location', + Component, + resizeable: true, + heightEstimate: 150, + expandable: true, + startExpanded: true, + attributes: { + id: {}, + }, +}) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx index 56d31294ccc07..d54a338c14b5d 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePerson.tsx @@ -103,7 +103,12 @@ const Component = ({ attributes }: NotebookNodeProps -
+
{personLoading ? ( ) : ( @@ -152,7 +157,8 @@ export const NotebookNodePerson = createPostHogWidgetNode urls.personByDistinctId(attrs.id), resizeable: true, attributes: { diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/EventIcon.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/EventIcon.tsx new file mode 100644 index 0000000000000..abab74d367546 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/EventIcon.tsx @@ -0,0 +1,32 @@ +import { EventType } from '~/types' + +import { Tooltip } from '@posthog/lemon-ui' +import { IconAdsClick, IconExclamation, IconEyeHidden, IconEyeVisible, IconCode } from 'lib/lemon-ui/icons' +import { KEY_MAPPING } from 'lib/taxonomy' + +type EventIconProps = { event: EventType } + +export const EventIcon = ({ event }: EventIconProps): JSX.Element => { + let Component: React.ComponentType<{ className: string }> + switch (event.event) { + case '$pageview': + Component = IconEyeVisible + break + case '$pageleave': + Component = IconEyeHidden + break + case '$autocapture': + Component = IconAdsClick + break + case '$rageclick': + Component = IconExclamation + break + default: + Component = IconCode + } + return ( + + + + ) +} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/NotebookNodePersonFeed.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/NotebookNodePersonFeed.tsx new file mode 100644 index 0000000000000..28f71f4493b9a --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/NotebookNodePersonFeed.tsx @@ -0,0 +1,70 @@ +import { useValues } from 'kea' + +import { LemonSkeleton } from '@posthog/lemon-ui' +import { NotFound } from 'lib/components/NotFound' +import { NotebookNodeType, PersonType } from '~/types' +// import { TimelineEntry } from '~/queries/schema' +import { NotebookNodeProps } from 'scenes/notebooks/Notebook/utils' +import { personLogic } from 'scenes/persons/personLogic' +import { 
createPostHogWidgetNode } from '../NodeWrapper' +import { notebookNodePersonFeedLogic } from './notebookNodePersonFeedLogic' +import { Session } from './Session' + +const FeedSkeleton = (): JSX.Element => ( +
+ +
+) + +type FeedProps = { + person: PersonType +} + +const Feed = ({ person }: FeedProps): JSX.Element => { + const id = person.id ?? 'missing' + const { sessions, sessionsLoading } = useValues(notebookNodePersonFeedLogic({ personId: id })) + + if (!sessions && sessionsLoading) { + return + } else if (sessions === null) { + return + } + + return ( +
+ {sessions.map((session: any) => ( + + ))} +
+ ) +} + +const Component = ({ attributes }: NotebookNodeProps): JSX.Element => { + const { id } = attributes + + const logic = personLogic({ id }) + const { person, personLoading } = useValues(logic) + + if (personLoading) { + return + } else if (!person) { + return + } + + return +} + +type NotebookNodePersonFeedAttributes = { + id: string +} + +export const NotebookNodePersonFeed = createPostHogWidgetNode({ + nodeType: NotebookNodeType.PersonFeed, + titlePlaceholder: 'Feed', + Component, + resizeable: false, + expandable: false, + attributes: { + id: {}, + }, +}) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/Session.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/Session.tsx new file mode 100644 index 0000000000000..e181b27b49bd2 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/Session.tsx @@ -0,0 +1,89 @@ +import { useState } from 'react' +import { useActions, useValues } from 'kea' + +import { LemonButton } from '@posthog/lemon-ui' +import { IconRewindPlay } from '@posthog/icons' +import { dayjs } from 'lib/dayjs' +// import { TimelineEntry } from '~/queries/schema' +import { NotebookNodeType } from '~/types' +import { IconUnfoldLess, IconUnfoldMore } from 'lib/lemon-ui/icons' +import { humanFriendlyDetailedTime, humanFriendlyDuration } from 'lib/utils' +import { SessionEvent } from './SessionEvent' +import { notebookNodeLogic } from '../notebookNodeLogic' + +type SessionProps = { + session: any // TimelineEntry +} + +export const Session = ({ session }: SessionProps): JSX.Element => { + const { children, nodeId } = useValues(notebookNodeLogic) + const { updateAttributes } = useActions(notebookNodeLogic) + + const startTime = dayjs(session.events[0].timestamp) + const endTime = dayjs(session.events[session.events.length - 1].timestamp) + const durationSeconds = endTime.diff(startTime, 'second') + + const [isFolded, setIsFolded] = useState(false) + + const onOpenReplay = (): void => { + 
const newChildren = [...children] || [] + + const existingChild = newChildren.find((child) => child.attrs?.nodeId === `${nodeId}-active-replay`) + + if (existingChild) { + existingChild.attrs.id = session.sessionId + } else { + newChildren.splice(0, 0, { + type: NotebookNodeType.Recording, + attrs: { + id: session.sessionId, + nodeId: `${nodeId}-active-replay`, + height: '5rem', + __init: { + expanded: true, + }, + }, + }) + } + + updateAttributes({ + children: newChildren, + }) + } + + return ( +
+
+
+ : } + status="stealth" + onClick={() => setIsFolded((state) => !state)} + /> + {humanFriendlyDetailedTime(startTime)} +
+
+ + {session.events.length} events in {humanFriendlyDuration(durationSeconds)} + + {session.recording_duration_s ? ( + } + onClick={() => onOpenReplay()} + /> + ) : null} +
+
+ {!isFolded && ( +
+ {session.events.map((event: any) => ( + + ))} +
+ )} +
+ ) +} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/SessionEvent.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/SessionEvent.tsx new file mode 100644 index 0000000000000..00131544c5662 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/SessionEvent.tsx @@ -0,0 +1,18 @@ +import { EventType } from '~/types' +import { eventToDescription } from 'lib/utils' +import { dayjs } from 'lib/dayjs' +import { EventIcon } from './EventIcon' + +type SessionEventProps = { event: EventType } + +export const SessionEvent = ({ event }: SessionEventProps): JSX.Element => ( +
+
+ + {eventToDescription(event)} +
+
+ {dayjs(event.timestamp).format('h:mm:ss A')} +
+
+) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/mockSessionsTimelineQueryResponse.json b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/mockSessionsTimelineQueryResponse.json new file mode 100644 index 0000000000000..c88078c9cafa0 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/mockSessionsTimelineQueryResponse.json @@ -0,0 +1,27214 @@ +{ + "hasMore": false, + "hogql": "SELECT e.uuid, e.timestamp, e.event, e.properties, e.distinct_id, e.elements_chain, e.$session_id AS formal_session_id, first_value(e.uuid) OVER (PARTITION BY $session_id ORDER BY divide(__toInt64(timestamp), 60000000.0) ASC RANGE BETWEEN 1800 PRECEDING AND CURRENT ROW) AS informal_session_uuid, dateDiff('s', sre.start_time, sre.end_time) AS recording_duration_s FROM events AS e LEFT JOIN (SELECT start_time, end_time, session_id FROM session_replay_events) AS sre ON equals(e.$session_id, sre.session_id) WHERE equals(e.person_id, '018a92fd-a1c3-0000-4144-fb39888c298e') ORDER BY timestamp DESC LIMIT 100", + "results": [ + { + "events": [ + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageleave", + "id": "018b4c9d-aef5-789f-81e4-212476c51e92", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/canvas#state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6IkV2ZW50IEV4cGxvcmVyIn1dfSx7InR5cGUiOiJwaC1xdWVyeSIsImF0dHJzIjp7ImhlaWdodCI6bnVsbCwidGl0bGUiOm51bGwsIm5vZGVJZCI6IjZkNDg1MDY2LWVjOTktNDgzZC04Yjk4LTRkOGEyZGM5Y2M0YiIsIl9faW5pdCI6bnVsbCwiY2hpbGRyZW4iOlt7InR5cGUiOiJwaC1wZXJzb24iLCJhdHRycyI6eyJpZCI6InBsckVZOHBBN3NYMW1GdDg3NGFBVUhlNXRXWmkzbExGOWJ1OWVqUXlHUHkifX0seyJ0eXBlIjoicGgtcmVjb3JkaW5nIiwiYXR0cnMiOnsiaWQiOiIwMThiNDc0MC1kY2JlLTdkNzctYjJlMi05MmJjZTIwYTIzZTMifX1dLCJxdWVyeSI6eyJraW5kIjoiRGF0YVRhYmxlTm9kZSIsInNvdXJjZSI6eyJraW5kIjoiRXZlbnRzUXVlcnkiLCJhZnRlciI6Ii0yNGgiLCJsaW1pdCI6MTAwLCJzZWxlY3QiOlsiKiIsImV2ZW50IiwicGVyc29uIiwiY29hbGVzY2UocHJvcGVydGllcy4kY3VycmVudF91cmwsIHByb3BlcnRpZXMuJHNjcmVlbl9uYW1lKSAtLSBVcmwgLyBTY3JlZW4iLCJwcm9wZXJ0aWVzLiRsaWIiLCJ0aW1lc3RhbXAiXSwicHJvcGVydGllcyI6W119fX19XX0%3D", + "$host": "localhost:8000", + "$pathname": "/canvas", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "xumznmndw7ojbqql", + "$time": 1697797484.277, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + 
"role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + 
"$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + 
"projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4c9d-ab5d-778b-9a9c-57db85a58ec0", + "$window_id": "018b4c9d-ab5d-778b-9a9c-57dc585fb9f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/canvas#state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6IkV2ZW50IEV4cGxvcmVyIn1dfSx7InR5cGUiOiJwaC1xdWVyeSIsImF0dHJzIjp7ImhlaWdodCI6bnVsbCwidGl0bGUiOm51bGwsIm5vZGVJZCI6IjZkNDg1MDY2LWVjOTktNDgzZC04Yjk4LTRkOGEyZGM5Y2M0YiIsIl9faW5pdCI6bnVsbCwiY2hpbGRyZW4iOlt7InR5cGUiOiJwaC1wZXJzb24iLCJhdHRycyI6eyJpZCI6InBsckVZOHBBN3NYMW1GdDg3NGFBVUhlNXRXWmkzbExGOWJ1OWVqUXlHUHkifX0seyJ0eXBlIjoicGgtcmVjb3JkaW5nIiwiYXR0cnMiOnsiaWQiOiIwMThiNDc0MC1kY2JlLTdkNzctYjJlMi05MmJjZTIwYTIzZTMifX1dLCJxdWVyeSI6eyJraW5kIjoiRGF0YVRhYmxlTm9kZSIsInNvdXJjZSI6eyJraW5kIjoiRXZlbnRzUXVlcnkiLCJhZnRlciI6Ii0yNGgiLCJsaW1pdCI6MTAwLCJzZWxlY3QiOlsiKiIsImV2ZW50IiwicGVyc29uIiwiY29hbGVzY2UocHJvcGVydGllcy4kY3VycmVudF91cmwsIHByb3BlcnRpZXMuJHNjcmVlbl9uYW1lKSAtLSBVcmwgLyBTY3JlZW4iLCJwcm9wZXJ0aWVzLiRsaWIiLCJ0aW1lc3RhbXAiXSwicHJvcGVydGllcyI6W119fX19XX0%3D", + "$pathname": "/canvas", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/canvas#state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6IkV2ZW50IEV4cGxvcmVyIn1dfSx7InR5cGUiOiJwaC1xdWVyeSIsImF0dHJzIjp7ImhlaWdodCI6bnVsbCwidGl0bGUiOm51bGwsIm5vZGVJZCI6IjZkNDg1MDY2LWVjOTktNDgzZC04Yjk4LTRkOGEyZGM5Y2M0YiIsIl9faW5pdCI6bnVsbCwiY2hpbGRyZW4iOlt7InR5cGUiOiJwaC1wZXJzb24iLCJhdHRycyI6eyJpZCI6InBsckVZOHBBN3NYMW1GdDg3NGFBVUhlNXRXWmkzbExGOWJ1OWVqUXlHUHkifX0seyJ0eXBlIjoicGgtcmVjb3JkaW5nIiwiYXR0cnMiOnsiaWQiOiIwMThiNDc0MC1kY2JlLTdkNzctYjJlMi05MmJjZTIwYTIzZTMifX1dLCJxdWVyeSI6eyJraW5kIjoiRGF0YVRhYmxlTm9kZSIsInNvdXJjZSI6eyJraW5kIjoiRXZlbnRzUXVlcnkiLCJhZnRlciI6Ii0yNGgiLCJsaW1pdCI6MTAwLCJzZWxlY3QiOlsiKiIsImV2ZW50IiwicGVyc29uIiwiY29hbGVzY2UocHJvcGVydGllcy4kY3VycmVudF91cmwsIHByb3BlcnRpZXMuJHNjcmVlbl9uYW1lKSAtLSBVcmwgLyBTY3JlZW4iLCJwcm9wZXJ0aWVzLiRsaWIiLCJ0aW1lc3RhbXAiXSwicHJvcGVydGllcyI6W119fX19XX0%3D", + "$initial_pathname": "/canvas", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:24:44.284000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:24:44.281000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$opt_in", + 
"id": "018b4ca0-9676-7421-9397-8506bb94ebac", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "vodqry65n7z97q71", + "$time": 1697797674.614, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + 
"session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + 
"$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": 
null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4c9d-ab5d-778b-9a9c-57db85a58ec0", + "$window_id": "018b4ca0-9674-7a84-9e68-ee5996f32986", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:54.616000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": 
"018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.625000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-9678-732b-a9e0-bfbbd9dbc2ce", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "tkovmn0505g1e31n", + "$time": 1697797674.616, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + 
"surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + 
"$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + 
"free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "title": "PostHog", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4c9d-ab5d-778b-9a9c-57db85a58ec0", + "$window_id": "018b4ca0-9674-7a84-9e68-ee5996f32986", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:54.617000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + 
"$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.631000+00:00", + "uuid": null + } + ], + "recording_duration_s": null, + "sessionId": "018b4c9d-ab5d-778b-9a9c-57db85a58ec0" + }, + { + "events": [ + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$set", + "id": "018b4ca0-9686-7b75-ba2e-61e8667c6a38", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "rffzbtlav260619e", + "$time": 1697797674.63, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + 
"ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$set": { + "email": "test@posthog.com", + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.639000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4ca0-9687-7734-890a-7891724ddd1e", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "8cp3ng6n4b6np5ys", + "$time": 1697797674.631, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$group_type": "project", + "$group_key": "018a92f8-b602-0000-75de-4b9073693531", + "$group_set": { + "id": 1, + "uuid": "018a92f8-b602-0000-75de-4b9073693531", + "name": "Hedgebox", + "ingested_event": true, + "is_demo": false, + "timezone": "UTC", + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.640000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": 
"018b4ca0-9688-735f-882b-8dfa30ad146b", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "l4ypd28525qtjw68", + "$time": 1697797674.632, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + 
"session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + 
"$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": 
null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "organization", + "$group_key": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_set": { + "id": "018a92f8-afff-0000-efec-ca77de39e384", + "name": "Hedgebox Inc.", + "slug": "hedgebox-inc", + "created_at": "2023-09-14T09:14:46.145060Z", + "available_features": [ + "zapier", + "slack_integration", + "microsoft_teams_integration", + "discord_integration", + "apps", + "app_metrics", + "boolean_flags", + "multivariate_flags", + "persist_flags_cross_authentication", + "feature_flag_payloads", + "multiple_release_conditions", + "release_condition_overrides", + "targeting_by_group", + "local_evaluation_and_bootstrapping", + "flag_usage_stats", + "experimentation", + "group_experiments", + "funnel_experiments", + "secondary_metrics", + "statistical_analysis", + "console_logs", + "recordings_playlists", + "recordings_performance", + "recordings_file_export", + "group_analytics", + "dashboards", + "funnels", + "graphs_trends", + "paths", + "subscriptions", + "paths_advanced", + "dashboard_permissioning", + "dashboard_collaboration", + "ingestion_taxonomy", + "correlation_analysis", + "tagging", + "behavioral_cohort_filtering", + "tracked_users", + "data_retention", + "team_members", + "organizations_projects", + "api_access", + "project_based_permissioning", + "social_sso", + "sso_enforcement", + 
"white_labelling", + "community_support", + "dedicated_support", + "email_support", + "terms_and_conditions", + "security_assessment" + ], + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.641000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-96d9-72c4-940e-87b71e722e23", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + 
"$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "xnqcrio4jvhwlgow", + "$time": 1697797674.714, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + 
"surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + 
"$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + 
"projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "posthog-3000", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": 
"018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.723000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-96da-7ee1-870d-308e41519ca3", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "9hatx583uf4rb4c8", + "$time": 1697797674.714, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + 
"high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + 
"$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + 
"percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "enable-prompts", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + 
"$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.724000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-96e3-7222-bd72-66e3936e12d4", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "qzz7i4g28jxppet9", + "$time": 1697797674.725, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + 
"recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + 
"$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + 
"unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "notebooks", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + 
"$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.734000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-976b-793a-9471-146bc8d275fa", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "zd3q70dq6xabtyjy", + "$time": 1697797674.859, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "Homepage \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.868000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4ca0-976e-70e4-9638-9cc06f5ff7ae", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", 
+ "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "tr5rt49cwyup64d1", + "$time": 1697797674.863, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + 
"$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + 
"$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + 
"current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "instance", + "$group_key": "http://localhost:8000", + "$group_set": { + "site_url": "http://localhost:8000" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": 
"cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:54.872000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard loading time", + "id": "018b4ca0-9a3d-701d-ae6b-2dd63b414e53", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "bky5j00lf96hy1er", + "$time": 1697797675.581, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", 
+ "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + 
"$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + 
"current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 732, + "dashboardId": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + 
"$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:55.590000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "viewed dashboard", + "id": "018b4ca0-9d0b-7f91-95e6-1e4ca5ee515d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "r9bc60imc531ktci", + "$time": 1697797676.299, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + 
"onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + 
"$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + 
"current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "created_at": "2023-09-14T09:15:00.211731Z", + "is_shared": false, + "pinned": true, + "creation_mode": "default", + "sample_items_count": 0, + "item_count": 7, + "created_by_system": true, + "dashboard_id": 1, + "lastRefreshed": "2023-10-20T10:20:11.963Z", + "refreshAge": 464, + "lifecycle_count": 2, + "trends_count": 3, + "funnels_count": 1, + "retention_count": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": 
"127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:56.308000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["Link", "SidebarListItem__link"], + "attr_id": null, + "attributes": { + "attr__class": "Link SidebarListItem__link", + "attr__draggable": "true", + "attr__href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI" + }, + "href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "a", + "text": null + }, + { + "attr_class": ["SidebarListItem"], + "attr_id": 
"sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-invalid": "false", + "attr__class": "SidebarListItem", + "attr__id": "sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3", + "attr__style": "height: 32px; left: 0px; position: absolute; top: 64px; width: 100%;", + "attr__title": "test@posthog.com" + }, + "href": null, + "nth_child": 3.0, + "nth_of_type": 3.0, + "order": 1.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid__innerScrollContainer"], + "attr_id": null, + "attributes": { + "attr__class": "ReactVirtualized__Grid__innerScrollContainer", + "attr__role": "rowgroup", + "attr__style": "width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: relative;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid", "ReactVirtualized__List", "SidebarList"], + "attr_id": null, + "attributes": { + "attr__aria-label": "grid", + "attr__aria-readonly": "true", + "attr__class": "ReactVirtualized__Grid ReactVirtualized__List SidebarList", + "attr__role": "grid", + "attr__style": "box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__style": "overflow: visible; height: 0px;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex-1"], + "attr_id": null, + "attributes": { + "attr__class": "flex-1", + "attr__style": 
"position: relative;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Accordion"], + "attr_id": null, + "attributes": { + "attr__aria-busy": "false", + "attr__aria-disabled": "false", + "attr__aria-expanded": "true", + "attr__class": "Accordion" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "section", + "text": null + }, + { + "attr_class": ["Sidebar3000__lists"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__lists" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000__content"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000"], + "attr_id": null, + "attributes": { + "attr__aria-hidden": "false", + "attr__class": "Sidebar3000", + "attr__style": "--sidebar-width: 250px;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": 
"a.Link.SidebarListItem__link:attr__class=\"Link SidebarListItem__link\"attr__draggable=\"true\"attr__href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"nth-child=\"1\"nth-of-type=\"1\";li.SidebarListItem:attr__aria-disabled=\"false\"attr__aria-invalid=\"false\"attr__class=\"SidebarListItem\"attr__id=\"sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3\"attr__style=\"height: 32px; left: 0px; position: absolute; top: 64px; width: 100%;\"attr__title=\"test@posthog.com\"attr_id=\"sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3\"nth-child=\"3\"nth-of-type=\"3\";div.ReactVirtualized__Grid__innerScrollContainer:attr__class=\"ReactVirtualized__Grid__innerScrollContainer\"attr__role=\"rowgroup\"attr__style=\"width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: relative;\"nth-child=\"1\"nth-of-type=\"1\";div.ReactVirtualized__Grid.ReactVirtualized__List.SidebarList:attr__aria-label=\"grid\"attr__aria-readonly=\"true\"attr__class=\"ReactVirtualized__Grid ReactVirtualized__List SidebarList\"attr__role=\"grid\"attr__style=\"box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__style=\"overflow: visible; height: 0px;\"nth-child=\"1\"nth-of-type=\"1\";div.flex-1:attr__class=\"flex-1\"attr__style=\"position: 
relative;\"nth-child=\"2\"nth-of-type=\"2\";section.Accordion:attr__aria-busy=\"false\"attr__aria-disabled=\"false\"attr__aria-expanded=\"true\"attr__class=\"Accordion\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000__lists:attr__class=\"Sidebar3000__lists\"nth-child=\"2\"nth-of-type=\"2\";div.Sidebar3000__content:attr__class=\"Sidebar3000__content\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000:attr__aria-hidden=\"false\"attr__class=\"Sidebar3000\"attr__style=\"--sidebar-width: 250px;\"nth-child=\"2\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4ca0-a013-7c3d-9cbd-825b3733cec2", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "b99wxvg3x3924y92", + "$time": 1697797677.075, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + 
"recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + 
"$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + 
"unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": 
"$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:57.084000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-a01c-769b-aa86-7a1610aba22a", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "pd0e9vs1h9dseim5", + "$time": 1697797677.084, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": 
true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + 
"slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": 
"$direct", + "$referring_domain": "$direct", + "$feature_flag": "product-specific-onboarding", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:57.094000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": 
"$feature_flag_called", + "id": "018b4ca0-a082-7b23-a908-b488abaaa928", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "81zsk054pmqq1xc0", + "$time": 1697797677.187, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + 
"product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + 
"$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 
0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "cs-dashboards", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + 
"$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:57.196000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-a11d-73af-85b8-ed8d0af6f60c", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "h057ocr0sxdig2et", + "$time": 1697797677.342, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + 
"session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + 
"$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": 
"0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": 
"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:27:57.623000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:57.351000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__href": "/person/bbf314c9-0877-450e-793a-f33cf151dd96" + }, + "href": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "h5", + "text": "bbf314c9-0877-450e-793a-f33cf151dd96" + }, + { + "attr_class": ["Link", "SidebarListItem__link"], + "attr_id": null, + "attributes": { + "attr__class": "Link SidebarListItem__link", + "attr__draggable": "true", + "attr__href": "/person/bbf314c9-0877-450e-793a-f33cf151dd96" + }, + 
"href": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "a", + "text": null + }, + { + "attr_class": ["SidebarListItem"], + "attr_id": "sidebar-bbf314c9-0877-450e-793a-f33cf151dd96", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-invalid": "false", + "attr__class": "SidebarListItem", + "attr__id": "sidebar-bbf314c9-0877-450e-793a-f33cf151dd96", + "attr__style": "height: 32px; left: 0px; position: absolute; top: 96px; width: 100%;", + "attr__title": "bbf314c9-0877-450e-793a-f33cf151dd96" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 4.0, + "order": 2.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid__innerScrollContainer"], + "attr_id": null, + "attributes": { + "attr__class": "ReactVirtualized__Grid__innerScrollContainer", + "attr__role": "rowgroup", + "attr__style": "width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: relative;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid", "ReactVirtualized__List", "SidebarList"], + "attr_id": null, + "attributes": { + "attr__aria-label": "grid", + "attr__aria-readonly": "true", + "attr__class": "ReactVirtualized__Grid ReactVirtualized__List SidebarList", + "attr__role": "grid", + "attr__style": "box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__style": "overflow: visible; height: 0px;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex-1"], + "attr_id": null, 
+ "attributes": { + "attr__class": "flex-1", + "attr__style": "position: relative;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Accordion"], + "attr_id": null, + "attributes": { + "attr__aria-busy": "false", + "attr__aria-disabled": "false", + "attr__aria-expanded": "true", + "attr__class": "Accordion" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "section", + "text": null + }, + { + "attr_class": ["Sidebar3000__lists"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__lists" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000__content"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000"], + "attr_id": null, + "attributes": { + "attr__aria-hidden": "false", + "attr__class": "Sidebar3000", + "attr__style": "--sidebar-width: 250px;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 13.0, + "tag_name": "body", + "text": null 
+ } + ], + "elements_chain": "h5:attr__href=\"/person/bbf314c9-0877-450e-793a-f33cf151dd96\"href=\"/person/bbf314c9-0877-450e-793a-f33cf151dd96\"nth-child=\"1\"nth-of-type=\"1\"text=\"bbf314c9-0877-450e-793a-f33cf151dd96\";a.Link.SidebarListItem__link:attr__class=\"Link SidebarListItem__link\"attr__draggable=\"true\"attr__href=\"/person/bbf314c9-0877-450e-793a-f33cf151dd96\"href=\"/person/bbf314c9-0877-450e-793a-f33cf151dd96\"nth-child=\"1\"nth-of-type=\"1\";li.SidebarListItem:attr__aria-disabled=\"false\"attr__aria-invalid=\"false\"attr__class=\"SidebarListItem\"attr__id=\"sidebar-bbf314c9-0877-450e-793a-f33cf151dd96\"attr__style=\"height: 32px; left: 0px; position: absolute; top: 96px; width: 100%;\"attr__title=\"bbf314c9-0877-450e-793a-f33cf151dd96\"attr_id=\"sidebar-bbf314c9-0877-450e-793a-f33cf151dd96\"nth-child=\"4\"nth-of-type=\"4\";div.ReactVirtualized__Grid__innerScrollContainer:attr__class=\"ReactVirtualized__Grid__innerScrollContainer\"attr__role=\"rowgroup\"attr__style=\"width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: relative;\"nth-child=\"1\"nth-of-type=\"1\";div.ReactVirtualized__Grid.ReactVirtualized__List.SidebarList:attr__aria-label=\"grid\"attr__aria-readonly=\"true\"attr__class=\"ReactVirtualized__Grid ReactVirtualized__List SidebarList\"attr__role=\"grid\"attr__style=\"box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__style=\"overflow: visible; height: 0px;\"nth-child=\"1\"nth-of-type=\"1\";div.flex-1:attr__class=\"flex-1\"attr__style=\"position: 
relative;\"nth-child=\"2\"nth-of-type=\"2\";section.Accordion:attr__aria-busy=\"false\"attr__aria-disabled=\"false\"attr__aria-expanded=\"true\"attr__class=\"Accordion\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000__lists:attr__class=\"Sidebar3000__lists\"nth-child=\"2\"nth-of-type=\"2\";div.Sidebar3000__content:attr__class=\"Sidebar3000__content\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000:attr__aria-hidden=\"false\"attr__class=\"Sidebar3000\"attr__style=\"--sidebar-width: 250px;\"nth-child=\"2\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4ca0-a7ad-7c7c-95c6-394a4fe1e057", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "78a5df8y7e77s5ak", + "$time": 1697797679.022, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", 
+ "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "bbf314c9-0877-450e-793a-f33cf151dd96", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.029000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-a7b3-73e0-9616-9a41d3b9f6ec", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$host": "localhost:8000", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$browser_language": 
"en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "xzd3u2auqqufn8xp", + "$time": 1697797679.027, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + 
"$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + 
"$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + 
"unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + 
"$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.034000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-a7b5-7ab1-914c-910f91ea3d80", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$host": "localhost:8000", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "gztdmqq16s7ax9lr", + "$time": 1697797679.029, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + 
"surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + 
"$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + 
"free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "persons-hogql-query", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_browser_version": 117, + "$initial_referrer": 
"$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.036000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-a7b6-7f8c-87d8-47ec71ea6588", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$host": "localhost:8000", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "dzmx9uoovohdmidn", + "$time": 1697797679.03, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", 
+ "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + 
"$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": 
"cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "hogql-insights", + "$feature_flag_response": false, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": 
"018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.037000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4ca0-a897-7497-bd78-4c922df74162", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + 
"$host": "localhost:8000", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "1ox32j426hf22rbq", + "$time": 1697797679.255, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + 
"console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + 
"$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 
0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "query": { + "kind": "HogQLQuery", + "query": "select id, groupArray(pdi.distinct_id) as distinct_ids, properties, is_identified, created_at from persons where pdi.distinct_id={distinct_id} group by id, properties, is_identified, created_at", + "values": { + "distinct_id": "bbf314c9-0877-450e-793a-f33cf151dd96" + } + }, + "duration": 225.0999999642372, + "clickhouse_sql": "SELECT persons.id, groupArray(persons__pdi.distinct_id) AS distinct_ids, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_0)s) FROM (SELECT person.id, person.properties AS properties, person.is_identified AS is_identified, person.created_at AS created_at FROM person WHERE and(equals(person.team_id, 1), ifNull(in(tuple(person.id, person.version), (SELECT person.id, max(person.version) AS version FROM person WHERE equals(person.team_id, 1) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id FROM person_distinct_id2 WHERE equals(person_distinct_id2.team_id, 1) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS persons__pdi ON equals(persons.id, persons__pdi.person_id) WHERE ifNull(equals(persons__pdi.distinct_id, %(hogql_val_1)s), 0) GROUP BY 
persons.id, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_2)s) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.262000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + 
"attr_class": null, + "attr_id": null, + "attributes": { + "attr__href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI" + }, + "href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "h5", + "text": "test@posthog.com" + }, + { + "attr_class": ["Link", "SidebarListItem__link"], + "attr_id": null, + "attributes": { + "attr__class": "Link SidebarListItem__link", + "attr__draggable": "true", + "attr__href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI" + }, + "href": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "a", + "text": null + }, + { + "attr_class": ["SidebarListItem"], + "attr_id": "sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-invalid": "false", + "attr__class": "SidebarListItem", + "attr__id": "sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3", + "attr__style": "height: 32px; left: 0px; position: absolute; top: 64px; width: 100%;", + "attr__title": "test@posthog.com" + }, + "href": null, + "nth_child": 3.0, + "nth_of_type": 3.0, + "order": 2.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid__innerScrollContainer"], + "attr_id": null, + "attributes": { + "attr__class": "ReactVirtualized__Grid__innerScrollContainer", + "attr__role": "rowgroup", + "attr__style": "width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: relative;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ReactVirtualized__Grid", "ReactVirtualized__List", "SidebarList"], + "attr_id": null, + "attributes": { + "attr__aria-label": 
"grid", + "attr__aria-readonly": "true", + "attr__class": "ReactVirtualized__Grid ReactVirtualized__List SidebarList", + "attr__role": "grid", + "attr__style": "box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__style": "overflow: visible; height: 0px;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex-1"], + "attr_id": null, + "attributes": { + "attr__class": "flex-1", + "attr__style": "position: relative;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Accordion"], + "attr_id": null, + "attributes": { + "attr__aria-busy": "false", + "attr__aria-disabled": "false", + "attr__aria-expanded": "true", + "attr__class": "Accordion" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "section", + "text": null + }, + { + "attr_class": ["Sidebar3000__lists"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__lists" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000__content"], + "attr_id": null, + "attributes": { + "attr__class": "Sidebar3000__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Sidebar3000"], + "attr_id": null, + "attributes": { + "attr__aria-hidden": "false", + "attr__class": "Sidebar3000", + "attr__style": "--sidebar-width: 250px;" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + 
"text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 13.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "h5:attr__href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"nth-child=\"1\"nth-of-type=\"1\"text=\"test@posthog.com\";a.Link.SidebarListItem__link:attr__class=\"Link SidebarListItem__link\"attr__draggable=\"true\"attr__href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"href=\"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI\"nth-child=\"1\"nth-of-type=\"1\";li.SidebarListItem:attr__aria-disabled=\"false\"attr__aria-invalid=\"false\"attr__class=\"SidebarListItem\"attr__id=\"sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3\"attr__style=\"height: 32px; left: 0px; position: absolute; top: 64px; width: 100%;\"attr__title=\"test@posthog.com\"attr_id=\"sidebar-krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI,018a92fd-7b25-7770-a050-577e94fd148c,018b4715-2fd1-7b9d-b1de-4cf7972a15e3\"nth-child=\"3\"nth-of-type=\"3\";div.ReactVirtualized__Grid__innerScrollContainer:attr__class=\"ReactVirtualized__Grid__innerScrollContainer\"attr__role=\"rowgroup\"attr__style=\"width: auto; height: 95872px; max-width: 250px; max-height: 95872px; overflow: hidden; position: 
relative;\"nth-child=\"1\"nth-of-type=\"1\";div.ReactVirtualized__Grid.ReactVirtualized__List.SidebarList:attr__aria-label=\"grid\"attr__aria-readonly=\"true\"attr__class=\"ReactVirtualized__Grid ReactVirtualized__List SidebarList\"attr__role=\"grid\"attr__style=\"box-sizing: border-box; direction: ltr; height: 312px; position: relative; width: 250px; will-change: transform; overflow: hidden auto;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__style=\"overflow: visible; height: 0px;\"nth-child=\"1\"nth-of-type=\"1\";div.flex-1:attr__class=\"flex-1\"attr__style=\"position: relative;\"nth-child=\"2\"nth-of-type=\"2\";section.Accordion:attr__aria-busy=\"false\"attr__aria-disabled=\"false\"attr__aria-expanded=\"true\"attr__class=\"Accordion\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000__lists:attr__class=\"Sidebar3000__lists\"nth-child=\"2\"nth-of-type=\"2\";div.Sidebar3000__content:attr__class=\"Sidebar3000__content\"nth-child=\"1\"nth-of-type=\"1\";div.Sidebar3000:attr__aria-hidden=\"false\"attr__class=\"Sidebar3000\"attr__style=\"--sidebar-width: 250px;\"nth-child=\"2\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4ca0-a98c-7384-b2e2-769ad052ee0b", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$host": "localhost:8000", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "je02qyvh3bhr5mgo", + 
"$time": 1697797679.501, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + 
"$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + 
"$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": 
"2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "test@posthog.com", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_pathname": "/person/bbf314c9-0877-450e-793a-f33cf151dd96", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.508000+00:00", + "uuid": null + }, + { + "distinct_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-a98f-7b7f-97f9-94db73ef7b5d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "rv3lg9ojjqh0bazz", + "$time": 1697797679.504, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + 
"show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + 
"$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + 
"usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "bbf314c9-0877-450e-793a-f33cf151dd96 \u2022 Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": 
"Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.511000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4ca0-aa6e-72a0-b88c-a09bd8399999", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "2zh0feb6n1tf9a1f", + "$time": 1697797679.727, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", 
+ "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + 
"$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + 
"current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "query": { + "kind": "HogQLQuery", + "query": "select id, groupArray(pdi.distinct_id) as distinct_ids, properties, is_identified, created_at from persons where pdi.distinct_id={distinct_id} group by id, properties, is_identified, created_at", + "values": { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI" + } + }, + "duration": 
221.60000002384186, + "clickhouse_sql": "SELECT persons.id, groupArray(persons__pdi.distinct_id) AS distinct_ids, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_0)s) FROM (SELECT person.id, person.properties AS properties, person.is_identified AS is_identified, person.created_at AS created_at FROM person WHERE and(equals(person.team_id, 1), ifNull(in(tuple(person.id, person.version), (SELECT person.id, max(person.version) AS version FROM person WHERE equals(person.team_id, 1) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id FROM person_distinct_id2 WHERE equals(person_distinct_id2.team_id, 1) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS persons__pdi ON equals(persons.id, persons__pdi.person_id) WHERE ifNull(equals(persons__pdi.distinct_id, %(hogql_val_1)s), 0) GROUP BY persons.id, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_2)s) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": 
"Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:27:59.734000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "person viewed", + "id": "018b4ca0-ac64-7425-ad3b-067188569418", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "tsn5j4iw9zo6hbti", + "$time": 1697797680.229, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": 
"018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + 
"$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + 
"slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": 
"$direct", + "$referring_domain": "$direct", + "properties_count": 40, + "has_email": true, + "has_name": false, + "custom_properties_count": 15, + "posthog_properties_count": 25, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:00.236000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"elements": [ + { + "attr_class": ["LemonTabs__tab-content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs__tab-content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["LemonTabs__tab"], + "attr_id": null, + "attributes": { + "attr__aria-selected": "false", + "attr__class": "LemonTabs__tab", + "attr__role": "tab", + "attr__tabindex": "0" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": ["LemonTabs__bar"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs__bar", + "attr__role": "tablist" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "ul", + "text": null + }, + { + "attr_class": ["LemonTabs"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs", + "attr__data-attr": "persons-tabs", + "attr__style": "--lemon-tabs-slider-width: 66.671875px; --lemon-tabs-slider-offset: 64.0078125px;" + }, + "href": null, + "nth_child": 3.0, + "nth_of_type": 3.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + 
"order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "div.LemonTabs__tab-content:attr__class=\"LemonTabs__tab-content\"nth-child=\"1\"nth-of-type=\"1\";li.LemonTabs__tab:attr__aria-selected=\"false\"attr__class=\"LemonTabs__tab\"attr__role=\"tab\"attr__tabindex=\"0\"nth-child=\"1\"nth-of-type=\"1\";ul.LemonTabs__bar:attr__class=\"LemonTabs__bar\"attr__role=\"tablist\"nth-child=\"1\"nth-of-type=\"1\";div.LemonTabs:attr__class=\"LemonTabs\"attr__data-attr=\"persons-tabs\"attr__style=\"--lemon-tabs-slider-width: 66.671875px; --lemon-tabs-slider-offset: 64.0078125px;\"nth-child=\"3\"nth-of-type=\"3\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4ca0-ae21-7de1-bb5d-02e960372d72", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "4ibu5gfwqj4wlqr7", + "$time": 1697797680.673, + "distinct_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + 
"$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": 
"hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + 
"billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:00.680000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + 
"elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-ae26-7bf7-b0bb-65948958a2b2", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "gt90ymm7l7vaiptd", + "$time": 1697797680.679, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + 
"artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + 
"$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + 
"current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "test@posthog.com \u2022 Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + 
"$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:00.686000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca0-ae35-7379-9b8c-431bd55514a9", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ogr8hhsl0diyzzrf", + "$time": 1697797680.694, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + 
"kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + 
"$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + 
"current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "debug-react-renders", + "$feature_flag_response": false, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": 
"Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:00.793000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:00.701000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4ca0-b109-79b8-98c9-2d69c5db0935", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$host": "localhost:8000", + "$pathname": 
"/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "0flk4pc1i44w8cgq", + "$time": 1697797681.417, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + 
"persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": 
true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + 
"percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "query": { + "kind": "SessionsTimelineQuery", + "after": "2021-01-01T18:00:00Z", + "before": "2024-01-01T06:00:00Z", + "personId": "018a92fd-a1c3-0000-4144-fb39888c298e" + }, + "duration": 476.5999999642372, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:03.798000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + 
"$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:01.427000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "notebook content changed", + "id": "018b4ca0-b25b-74ef-9d18-248c54b956bf", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "qoao0ux8ldt09rzp", + "$time": 1697797681.755, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": 
false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + 
"$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + 
"current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": 
"Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:03.798000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": 
"018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:28:01.765000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca0-b25b-74ef-9d18-248b29df99ce", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "bgnhtkaiu91n6vuo", + "$time": 1697797681.755, + "distinct_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + 
"$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": 
"hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + 
"billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "title": "test@posthog.com \u2022 Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:28:03.798000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:28:01.765000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageleave", + "id": "018b4ca3-104f-7f7f-b9f6-2e4411315c0f", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "5emypqvdlqmzblda", + "$time": 1697797836.88, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:36.883000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:36.886000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$opt_in", + "id": "018b4ca3-1cf4-7218-9d6f-0303911c1193", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "dqkiz2bg4xd3ga0h", + "$time": 1697797840.118, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:40.118000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.133000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca3-1cf7-7208-b179-72da3175820e", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "pyv7tc2zaju04w06", + "$time": 1697797840.12, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "title": "PostHog", + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca0-9686-7b75-ba2e-61e9c0054b65", + "$window_id": "018b4ca0-9686-7b75-ba2e-61ea857e58f0", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:40.121000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.138000+00:00", + "uuid": null + } + ], + "recording_duration_s": 72.0, + "sessionId": "018b4ca0-9686-7b75-ba2e-61e9c0054b65" + }, + { + "events": [ + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard updated", + "id": "018b4cc2-bd56-0000-ed0f-dbd6f467ac8c", + "person": null, + "properties": { + "pinned": true, + "item_count": 7, + "is_shared": false, + "created_at": "2023-09-14T09:15:00.211731+00:00", + "has_description": true, + "tags_count": 0, + "$groups": { + "instance": "http://localhost:8000", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "project": "018a92f8-b602-0000-75de-4b9073693531" + }, + "$lib": "posthog-python", + "$lib_version": "3.0.1", + "$geoip_disable": true, + "$ip": "127.0.0.1", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_1": "http://localhost:8000", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_3": "018a92f8-b602-0000-75de-4b9073693531" + }, + "timestamp": "2023-10-20T11:05:12.306000+00:00", + "uuid": null + } + ], + "recording_duration_s": null, + "sessionId": "" + }, + { + "events": [ + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$set", + "id": "018b4cc2-908b-7989-a77a-ae123bc7cbc4", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ha0xgsu9f9blyc2m", + "$time": 1697799901.323, + "distinct_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + 
"$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": 
"hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + 
"billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$set": { + "email": "test@posthog.com", + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.338000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4cc2-908c-7f15-8890-d2f9975a511c", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + 
"$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ud4b8d49iiqqfdil", + "$time": 1697799901.325, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + 
"surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + 
"$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + 
"projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "project", + "$group_key": "018a92f8-b602-0000-75de-4b9073693531", + "$group_set": { + "id": 1, + "uuid": "018a92f8-b602-0000-75de-4b9073693531", + "name": "Hedgebox", + "ingested_event": true, + "is_demo": false, + "timezone": "UTC", + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + 
"$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.339000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4cc2-908d-7164-bff2-c69b5d464e46", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "czd0chnpamqs1a4c", + "$time": 1697799901.325, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + 
"early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": 
true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + 
"unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "organization", + "$group_key": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_set": { + "id": "018a92f8-afff-0000-efec-ca77de39e384", + "name": "Hedgebox Inc.", + "slug": "hedgebox-inc", + "created_at": "2023-09-14T09:14:46.145060Z", + "available_features": [ + "zapier", + "slack_integration", + "microsoft_teams_integration", + "discord_integration", + "apps", + "app_metrics", + "boolean_flags", + "multivariate_flags", + "persist_flags_cross_authentication", + "feature_flag_payloads", + "multiple_release_conditions", + "release_condition_overrides", + "targeting_by_group", + "local_evaluation_and_bootstrapping", + "flag_usage_stats", + "experimentation", + "group_experiments", + "funnel_experiments", + "secondary_metrics", + "statistical_analysis", + 
"console_logs", + "recordings_playlists", + "recordings_performance", + "recordings_file_export", + "group_analytics", + "dashboards", + "funnels", + "graphs_trends", + "paths", + "subscriptions", + "paths_advanced", + "dashboard_permissioning", + "dashboard_collaboration", + "ingestion_taxonomy", + "correlation_analysis", + "tagging", + "behavioral_cohort_filtering", + "tracked_users", + "data_retention", + "team_members", + "organizations_projects", + "api_access", + "project_based_permissioning", + "social_sso", + "sso_enforcement", + "white_labelling", + "community_support", + "dedicated_support", + "email_support", + "terms_and_conditions", + "security_assessment" + ], + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], 
+ "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.340000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-90b0-7498-a7ad-d3ff880511c3", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "rwp5rcbgzns63s79", + "$time": 1697799901.36, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + 
"generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + 
"$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + 
"projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "posthog-3000", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + 
"$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.375000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-90b1-7bfb-9f2a-3a35e51991fe", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "rjjli19zqga47ayx", + "$time": 1697799901.361, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + 
"region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "enable-prompts", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + 
"$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.375000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-90bd-745c-b4b0-f90272791844", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "vx58g66m43l6t6f3", + "$time": 1697799901.373, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": 
"018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + 
"$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + 
"slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": 
"$direct", + "$referring_domain": "$direct", + "$feature_flag": "notebooks", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.387000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4cc2-9140-73ac-b975-4e50e33b3fba", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + 
"$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "x03pwwegn9jrn34n", + "$time": 1697799901.504, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + 
"console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + 
"$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 
0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "instance", + "$group_key": "http://localhost:8000", + "$group_set": { + "site_url": "http://localhost:8000" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": 
"018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.519000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4cc2-91af-7940-abb9-c07bae63b31e", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "5r7bw5fp42mlnj6q", + "$time": 1697799901.615, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + 
"high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + 
"$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + 
"percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "title": "Homepage \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + 
"$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.629000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard loading time", + "id": "018b4cc2-93a1-7246-a6bf-a897501c968c", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "6bivi84ckma4m1y9", + "$time": 1697799902.113, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + 
"recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + 
"$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + 
"unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 507, + "dashboardId": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": 
"$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:02.127000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "viewed dashboard", + "id": "018b4cc2-9669-7d7e-8b47-0c46f32bdc04", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "m30j6hcfsn1nttq0", + "$time": 1697799902.826, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "created_at": "2023-09-14T09:15:00.211731Z", + 
"is_shared": false, + "pinned": true, + "creation_mode": "default", + "sample_items_count": 0, + "item_count": 7, + "created_by_system": true, + "dashboard_id": 1, + "lastRefreshed": "2023-10-20T10:20:11.963Z", + "refreshAge": 2690, + "lifecycle_count": 2, + "trends_count": 3, + "funnels_count": 1, + "retention_count": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:02.840000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": 
["LemonButton__content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__content" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 0.0, + "tag_name": "span", + "text": "No date range override" + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--has-side-icon", + "LemonButton--secondary", + "LemonButton--small", + "LemonButton--status-stealth" + ], + "attr_id": "daterange_selector", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-haspopup": "true", + "attr__class": "LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon LemonButton--has-side-icon", + "attr__data-attr": "date-filter", + "attr__id": "daterange_selector", + "attr__type": "button" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "button", + "text": "No date range override" + }, + { + "attr_class": ["flex", "h-8", "items-center", "shrink-0"], + "attr_id": null, + "attributes": { + "attr__class": "flex shrink-0 items-center h-8" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "space-x-4"], + "attr_id": null, + "attributes": { + "attr__class": "flex space-x-4" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "justify-between", "space-x-4"], + "attr_id": null, + "attributes": { + "attr__class": "flex space-x-4 justify-between" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["dashboard"], + "attr_id": null, + "attributes": { + "attr__class": "dashboard" + 
}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["project-homepage"], + "attr_id": null, + "attributes": { + "attr__class": "project-homepage" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "span.LemonButton__content:attr__class=\"LemonButton__content\"nth-child=\"2\"nth-of-type=\"2\"text=\"No date range override\";button.LemonButton.LemonButton--has-icon.LemonButton--has-side-icon.LemonButton--secondary.LemonButton--small.LemonButton--status-stealth:attr__aria-disabled=\"false\"attr__aria-haspopup=\"true\"attr__class=\"LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon 
LemonButton--has-side-icon\"attr__data-attr=\"date-filter\"attr__id=\"daterange_selector\"attr__type=\"button\"attr_id=\"daterange_selector\"nth-child=\"1\"nth-of-type=\"1\"text=\"No date range override\";div.flex.h-8.items-center.shrink-0:attr__class=\"flex shrink-0 items-center h-8\"nth-child=\"1\"nth-of-type=\"1\";div.flex.space-x-4:attr__class=\"flex space-x-4\"nth-child=\"1\"nth-of-type=\"1\";div.flex.justify-between.space-x-4:attr__class=\"flex space-x-4 justify-between\"nth-child=\"1\"nth-of-type=\"1\";div:nth-child=\"1\"nth-of-type=\"1\";div.dashboard:attr__class=\"dashboard\"nth-child=\"1\"nth-of-type=\"1\";div.project-homepage:attr__class=\"project-homepage\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-9bd1-7a01-9caf-50416be478be", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "t8lxw271peuuit3u", + "$time": 1697799904.209, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + 
"signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + 
"$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": 
"018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "No date range override", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": 
"018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:04.317000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:04.223000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": [ + "LemonButton", + "LemonButton--full-width", + "LemonButton--status-stealth", + "LemonButton--tertiary" + ], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--full-width", + "attr__type": "button" + }, + "href": null, + "nth_child": 15.0, + "nth_of_type": 14.0, + "order": 0.0, + "tag_name": "button", + "text": "From custom date 
until now\u2026" + }, + { + "attr_class": ["space-y-px"], + "attr_id": null, + "attributes": { + "attr__class": "space-y-px" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__content"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__box"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__box" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover", "Popover--actionable", "Popover--enter-done"], + "attr_id": null, + "attributes": { + "attr__aria-level": "0", + "attr__class": "Popover Popover--actionable Popover--enter-done", + "attr__data-placement": "bottom-start", + "attr__style": "position: fixed; top: 95.544px; left: 318.838px; max-height: 759.456px; max-width: 1189.16px; width: initial;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__data-floating-ui-portal": "\"attr__id=", + "floating-ui-6\"attr_id": "floating-ui-6" + }, + "href": null, + "nth_child": 16.0, + "nth_of_type": 12.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "button.LemonButton.LemonButton--full-width.LemonButton--status-stealth.LemonButton--tertiary:attr__class=\"LemonButton LemonButton--tertiary LemonButton--status-stealth 
LemonButton--full-width\"attr__type=\"button\"nth-child=\"15\"nth-of-type=\"14\"text=\"From custom date until now\u2026\";div.space-y-px:attr__class=\"space-y-px\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__content:attr__class=\"Popover__content\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__box:attr__class=\"Popover__box\"nth-child=\"1\"nth-of-type=\"1\";div.Popover.Popover--actionable.Popover--enter-done:attr__aria-level=\"0\"attr__class=\"Popover Popover--actionable Popover--enter-done\"attr__data-placement=\"bottom-start\"attr__style=\"position: fixed; top: 95.544px; left: 318.838px; max-height: 759.456px; max-width: 1189.16px; width: initial;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__data-floating-ui-portal=\"\"attr__id=\"floating-ui-6\"attr_id=\"floating-ui-6\"nth-child=\"16\"nth-of-type=\"12\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-a773-7fd0-a00b-6d36dbcef384", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "aeet4il6oqfjrsgl", + "$time": 1697799907.188, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + 
"historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + 
"$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + 
"current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "From custom date until now\u2026", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", 
+ "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:07.436000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:07.200000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["LemonIcon"], + "attr_id": null, + "attributes": { + "attr__aria-hidden": "true", + "attr__class": "LemonIcon", + "attr__fill": "none", + "attr__focusable": "false", + "attr__height": "1em", + "attr__viewBox": "0 0 24 24", + "attr__width": "1em", + "attr__xmlns": "http://www.w3.org/2000/svg" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "svg", + "text": null + }, + { + "attr_class": ["LemonButton__icon"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__icon" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 
1.0, + "order": 1.0, + "tag_name": "span", + "text": null + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--full-width", + "LemonButton--has-icon", + "LemonButton--no-content", + "LemonButton--status-stealth", + "LemonButton--tertiary", + "absolute-left" + ], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--full-width LemonButton--no-content LemonButton--has-icon absolute-left", + "attr__data-attr": "lemon-calendar-month-previous", + "attr__type": "button" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "button", + "text": null + }, + { + "attr_class": ["relative"], + "attr_id": null, + "attributes": { + "attr__class": "relative" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "th", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "tr", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "thead", + "text": null + }, + { + "attr_class": ["LemonCalendar__month"], + "attr_id": null, + "attributes": { + "attr__class": "LemonCalendar__month", + "attr__data-attr": "lemon-calendar-month" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "table", + "text": null + }, + { + "attr_class": ["LemonCalendar", "flex", "gap-4", "items-start"], + "attr_id": null, + "attributes": { + "attr__class": "LemonCalendar flex items-start gap-4", + "attr__data-attr": "lemon-calendar" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["p-2"], + "attr_id": null, + "attributes": { + "attr__class": "p-2" + }, + "href": null, + "nth_child": 2.0, + 
"nth_of_type": 2.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["LemonCalendarSelect"], + "attr_id": null, + "attributes": { + "attr__class": "LemonCalendarSelect", + "attr__data-attr": "lemon-calendar-select" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__content"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__box"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__box" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover", "Popover--actionable", "Popover--enter-done"], + "attr_id": null, + "attributes": { + "attr__aria-level": "0", + "attr__class": "Popover Popover--actionable Popover--enter-done", + "attr__data-placement": "bottom-start", + "attr__style": "position: fixed; top: 96px; left: 316px; max-height: 759px; max-width: 1192px; width: initial;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__data-floating-ui-portal": "\"attr__id=", + "floating-ui-6\"attr_id": "floating-ui-6" + }, + "href": null, + "nth_child": 16.0, + "nth_of_type": 12.0, + "order": 13.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 14.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": 
"svg.LemonIcon:attr__aria-hidden=\"true\"attr__class=\"LemonIcon\"attr__fill=\"none\"attr__focusable=\"false\"attr__height=\"1em\"attr__viewBox=\"0 0 24 24\"attr__width=\"1em\"attr__xmlns=\"http://www.w3.org/2000/svg\"nth-child=\"1\"nth-of-type=\"1\";span.LemonButton__icon:attr__class=\"LemonButton__icon\"nth-child=\"1\"nth-of-type=\"1\";button.LemonButton.LemonButton--full-width.LemonButton--has-icon.LemonButton--no-content.LemonButton--status-stealth.LemonButton--tertiary.absolute-left:attr__class=\"LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--full-width LemonButton--no-content LemonButton--has-icon absolute-left\"attr__data-attr=\"lemon-calendar-month-previous\"attr__type=\"button\"nth-child=\"1\"nth-of-type=\"1\";th.relative:attr__class=\"relative\"nth-child=\"1\"nth-of-type=\"1\";tr:nth-child=\"1\"nth-of-type=\"1\";thead:nth-child=\"1\"nth-of-type=\"1\";table.LemonCalendar__month:attr__class=\"LemonCalendar__month\"attr__data-attr=\"lemon-calendar-month\"nth-child=\"1\"nth-of-type=\"1\";div.LemonCalendar.flex.gap-4.items-start:attr__class=\"LemonCalendar flex items-start gap-4\"attr__data-attr=\"lemon-calendar\"nth-child=\"1\"nth-of-type=\"1\";div.p-2:attr__class=\"p-2\"nth-child=\"2\"nth-of-type=\"2\";div.LemonCalendarSelect:attr__class=\"LemonCalendarSelect\"attr__data-attr=\"lemon-calendar-select\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__content:attr__class=\"Popover__content\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__box:attr__class=\"Popover__box\"nth-child=\"1\"nth-of-type=\"1\";div.Popover.Popover--actionable.Popover--enter-done:attr__aria-level=\"0\"attr__class=\"Popover Popover--actionable Popover--enter-done\"attr__data-placement=\"bottom-start\"attr__style=\"position: fixed; top: 96px; left: 316px; max-height: 759px; max-width: 1192px; width: 
initial;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__data-floating-ui-portal=\"\"attr__id=\"floating-ui-6\"attr_id=\"floating-ui-6\"nth-child=\"16\"nth-of-type=\"12\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-adb8-743c-8b0c-2d058759323d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "b1np3outhrtw2umv", + "$time": 1697799908.792, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + 
"exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": 
true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + 
"current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:10.440000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + 
"$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:08.797000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__d": "M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12 19 6.41z", + "attr__fill": "currentColor" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "path", + "text": null + }, + { + "attr_class": ["LemonIcon"], + "attr_id": null, + "attributes": { + "attr__aria-hidden": "true", + "attr__class": "LemonIcon", + "attr__fill": "none", + "attr__focusable": "false", + "attr__height": "1em", + "attr__viewBox": "0 0 24 24", + "attr__width": "1em", + "attr__xmlns": "http://www.w3.org/2000/svg" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "svg", + "text": null + }, + { + "attr_class": ["LemonButton__icon"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__icon" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "span", + "text": null + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--no-content", + "LemonButton--no-padding", + "LemonButton--small", + "LemonButton--status-stealth", + "LemonButton--tertiary" + ], + "attr_id": null, + "attributes": { + "attr__aria-label": "close", + "attr__class": 
"LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--no-padding LemonButton--small LemonButton--no-content LemonButton--has-icon", + "attr__type": "button" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "button", + "text": null + }, + { + "attr_class": ["border-b", "flex", "justify-between", "p-2", "pb-4"], + "attr_id": null, + "attributes": { + "attr__class": "flex justify-between border-b p-2 pb-4" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["LemonCalendarSelect"], + "attr_id": null, + "attributes": { + "attr__class": "LemonCalendarSelect", + "attr__data-attr": "lemon-calendar-select" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__content"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__box"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__box" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover", "Popover--actionable", "Popover--enter-done"], + "attr_id": null, + "attributes": { + "attr__aria-level": "0", + "attr__class": "Popover Popover--actionable Popover--enter-done", + "attr__data-placement": "bottom-start", + "attr__style": "position: fixed; top: 96px; left: 316px; max-height: 759px; max-width: 1192px; width: initial;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__data-floating-ui-portal": "\"attr__id=", + "floating-ui-6\"attr_id": "floating-ui-6" + }, + 
"href": null, + "nth_child": 16.0, + "nth_of_type": 12.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "path:attr__d=\"M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12 19 6.41z\"attr__fill=\"currentColor\"nth-child=\"1\"nth-of-type=\"1\";svg.LemonIcon:attr__aria-hidden=\"true\"attr__class=\"LemonIcon\"attr__fill=\"none\"attr__focusable=\"false\"attr__height=\"1em\"attr__viewBox=\"0 0 24 24\"attr__width=\"1em\"attr__xmlns=\"http://www.w3.org/2000/svg\"nth-child=\"1\"nth-of-type=\"1\";span.LemonButton__icon:attr__class=\"LemonButton__icon\"nth-child=\"1\"nth-of-type=\"1\";button.LemonButton.LemonButton--has-icon.LemonButton--no-content.LemonButton--no-padding.LemonButton--small.LemonButton--status-stealth.LemonButton--tertiary:attr__aria-label=\"close\"attr__class=\"LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--no-padding LemonButton--small LemonButton--no-content LemonButton--has-icon\"attr__type=\"button\"nth-child=\"2\"nth-of-type=\"1\";div.border-b.flex.justify-between.p-2.pb-4:attr__class=\"flex justify-between border-b p-2 pb-4\"nth-child=\"1\"nth-of-type=\"1\";div.LemonCalendarSelect:attr__class=\"LemonCalendarSelect\"attr__data-attr=\"lemon-calendar-select\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__content:attr__class=\"Popover__content\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__box:attr__class=\"Popover__box\"nth-child=\"1\"nth-of-type=\"1\";div.Popover.Popover--actionable.Popover--enter-done:attr__aria-level=\"0\"attr__class=\"Popover Popover--actionable Popover--enter-done\"attr__data-placement=\"bottom-start\"attr__style=\"position: fixed; top: 96px; left: 316px; max-height: 759px; 
max-width: 1192px; width: initial;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__data-floating-ui-portal=\"\"attr__id=\"floating-ui-6\"attr_id=\"floating-ui-6\"nth-child=\"16\"nth-of-type=\"12\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-b4f2-7fd1-96fd-33b43dbbba31", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "q4ton426hanvq70d", + "$time": 1697799910.643, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + 
"high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + 
"$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + 
"percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + 
"$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:10.652000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": [ + "LemonButton", + "LemonButton--status-stealth", + "LemonButton--tertiary", + "RollingDateRangeFilter", + "ant-tooltip-open" + ], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton LemonButton--tertiary LemonButton--status-stealth RollingDateRangeFilter ant-tooltip-open", + "attr__data-attr": "rolling-date-range-filter", + "attr__type": "button" + }, + "href": null, + "nth_child": 13.0, + "nth_of_type": 13.0, + "order": 0.0, + "tag_name": "button", + "text": null + }, + { + "attr_class": ["space-y-px"], + "attr_id": null, + "attributes": { + "attr__class": "space-y-px" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__content"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__box"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__box" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover", "Popover--actionable", 
"Popover--enter-done"], + "attr_id": null, + "attributes": { + "attr__aria-level": "0", + "attr__class": "Popover Popover--actionable Popover--enter-done", + "attr__data-placement": "bottom-start", + "attr__style": "position: fixed; top: 96px; left: 316px; max-height: 759px; max-width: 1192px; width: initial;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__data-floating-ui-portal": "\"attr__id=", + "floating-ui-6\"attr_id": "floating-ui-6" + }, + "href": null, + "nth_child": 16.0, + "nth_of_type": 12.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "button.LemonButton.LemonButton--status-stealth.LemonButton--tertiary.RollingDateRangeFilter.ant-tooltip-open:attr__class=\"LemonButton LemonButton--tertiary LemonButton--status-stealth RollingDateRangeFilter ant-tooltip-open\"attr__data-attr=\"rolling-date-range-filter\"attr__type=\"button\"nth-child=\"13\"nth-of-type=\"13\";div.space-y-px:attr__class=\"space-y-px\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__content:attr__class=\"Popover__content\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__box:attr__class=\"Popover__box\"nth-child=\"1\"nth-of-type=\"1\";div.Popover.Popover--actionable.Popover--enter-done:attr__aria-level=\"0\"attr__class=\"Popover Popover--actionable Popover--enter-done\"attr__data-placement=\"bottom-start\"attr__style=\"position: fixed; top: 96px; left: 316px; max-height: 759px; max-width: 1192px; width: 
initial;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__data-floating-ui-portal=\"\"attr__id=\"floating-ui-6\"attr_id=\"floating-ui-6\"nth-child=\"16\"nth-of-type=\"12\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-ba8a-744a-9296-ae83a0fb678c", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "9hbicf15by04k3kk", + "$time": 1697799912.074, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + 
"exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": 
true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + 
"current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + 
"$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:12.083000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard date range changed", + "id": "018b4cc2-ba98-7ad6-98aa-a99547036166", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "uwqspa6yuoq5q75h", + "$time": 1697799912.089, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + 
"auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + 
"$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + 
"usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "date_from": "-3d", + "date_to": "", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS 
X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:12.098000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard analyzed", + "id": "018b4cc2-bb85-7403-a87c-f4ba1a76997f", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "6q7tobdbp8r841sq", + "$time": 1697799912.326, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + 
"$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "created_at": "2023-09-14T09:15:00.211731Z", + 
"is_shared": false, + "pinned": true, + "creation_mode": "default", + "sample_items_count": 0, + "item_count": 7, + "created_by_system": true, + "dashboard_id": 1, + "lastRefreshed": "2023-10-20T10:20:11.963Z", + "refreshAge": 2700, + "lifecycle_count": 2, + "trends_count": 3, + "funnels_count": 1, + "retention_count": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:12.335000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + 
"event": "$feature_flag_called", + "id": "018b4cc2-bc1a-78b3-8dd2-62a1da584a79", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "rlklgku5nltwckj1", + "$time": 1697799912.475, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + 
"survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + 
"$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + 
"percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "hogql-insights", + "$feature_flag_response": false, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + 
"$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:12.484000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "dashboard refreshed", + "id": "018b4cc2-bc66-7cf9-b8b3-9b34a5a4bda5", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "lacrgsirai9uzwji", + "$time": 1697799912.551, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + 
"early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": 
true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + 
"unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "dashboard_id": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + 
}, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:12.560000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["LemonButton__content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__content" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 0.0, + "tag_name": "span", + "text": "Last 3 days" + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--has-side-icon", + "LemonButton--secondary", + "LemonButton--small", + "LemonButton--status-stealth" + ], + "attr_id": "daterange_selector", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-haspopup": "true", + "attr__class": "LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon LemonButton--has-side-icon", + "attr__data-attr": "date-filter", + "attr__id": "daterange_selector", + "attr__type": "button" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "button", + "text": "Last 3 days" + }, + { + "attr_class": ["flex", "h-8", "items-center", "shrink-0"], + "attr_id": null, + "attributes": { + "attr__class": "flex shrink-0 items-center h-8" + }, + "href": null, + 
"nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "space-x-4"], + "attr_id": null, + "attributes": { + "attr__class": "flex space-x-4" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "justify-between", "space-x-4"], + "attr_id": null, + "attributes": { + "attr__class": "flex space-x-4 justify-between" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["dashboard"], + "attr_id": null, + "attributes": { + "attr__class": "dashboard" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["project-homepage"], + "attr_id": null, + "attributes": { + "attr__class": "project-homepage" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": 
null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 12.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "span.LemonButton__content:attr__class=\"LemonButton__content\"nth-child=\"2\"nth-of-type=\"2\"text=\"Last 3 days\";button.LemonButton.LemonButton--has-icon.LemonButton--has-side-icon.LemonButton--secondary.LemonButton--small.LemonButton--status-stealth:attr__aria-disabled=\"false\"attr__aria-haspopup=\"true\"attr__class=\"LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon LemonButton--has-side-icon\"attr__data-attr=\"date-filter\"attr__id=\"daterange_selector\"attr__type=\"button\"attr_id=\"daterange_selector\"nth-child=\"1\"nth-of-type=\"1\"text=\"Last 3 days\";div.flex.h-8.items-center.shrink-0:attr__class=\"flex shrink-0 items-center h-8\"nth-child=\"1\"nth-of-type=\"1\";div.flex.space-x-4:attr__class=\"flex space-x-4\"nth-child=\"1\"nth-of-type=\"1\";div.flex.justify-between.space-x-4:attr__class=\"flex space-x-4 justify-between\"nth-child=\"1\"nth-of-type=\"1\";div:nth-child=\"1\"nth-of-type=\"1\";div.dashboard:attr__class=\"dashboard\"nth-child=\"1\"nth-of-type=\"1\";div.project-homepage:attr__class=\"project-homepage\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-bfa8-7a18-88bc-4c4df6cabd69", + "person": null, + 
"properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "h9m0b1qconuwsw62", + "$time": 1697799913.386, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + 
"webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + 
"$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + 
"usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "Last 3 days", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:13.450000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": 
[], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:13.395000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c072-7506-930b-471161be099f", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "jnfwtms067nt0iva", + "$time": 1697799913.586, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + 
"generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + 
"$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + 
"projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 1053, + "insightShortId": "dr2P0xDf", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": 
"Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:13.602000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c10e-75bc-9275-189e47433d83", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "c2ryrw33lz0vijd7", + "$time": 1697799913.743, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + 
"ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 1210, + "insightShortId": "gynVFet7", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + 
"$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:13.759000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c252-7c6f-b0d3-e2f321447b77", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "u7298ffp3754ebcm", + "$time": 1697799914.066, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": 
"018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + 
"$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + 
"slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": 
"$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 1534, + "insightShortId": "XijxpxgK", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:14.082000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c3c4-7ea1-8759-3043985f1e2a", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": 
"Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "sij6r7mjdn0dkglr", + "$time": 1697799914.436, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + 
"surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + 
"$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + 
"projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 1904, + "insightShortId": "12cRYEnk", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": 
"018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:14.453000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c4f5-77be-993e-9d88a05d46f8", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "gh9axm48pflzlu2l", + "$time": 1697799914.742, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + 
"high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + 
"$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + 
"percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 2209, + "insightShortId": "VRqTSTUU", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", 
+ "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:14.758000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c5f7-7ff9-ac9c-9b0b7d7fcf7b", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "sshoh198v54qzg5s", + "$time": 1697799914.999, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + 
"recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + 
"$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + 
"unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "loadingMilliseconds": 2466, + "insightShortId": "X4vwqdC6", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + 
"$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:15.015000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight refresh time", + "id": "018b4cc2-c6f4-7638-b108-639ce32e1062", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "45hmgrym77londxt", + "$time": 1697799915.253, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"loadingMilliseconds": 2720, + "insightShortId": "vTZuWkZE", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:15.269000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": [ + "LemonButton", + "LemonButton--full-width", + "LemonButton--status-stealth", + "LemonButton--tertiary" + ], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--full-width", + "attr__type": 
"button" + }, + "href": null, + "nth_child": 16.0, + "nth_of_type": 15.0, + "order": 0.0, + "tag_name": "button", + "text": "Custom fixed date range\u2026" + }, + { + "attr_class": ["space-y-px"], + "attr_id": null, + "attributes": { + "attr__class": "space-y-px" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__content"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover__box"], + "attr_id": null, + "attributes": { + "attr__class": "Popover__box" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Popover", "Popover--actionable", "Popover--enter-done"], + "attr_id": null, + "attributes": { + "attr__aria-level": "0", + "attr__class": "Popover Popover--actionable Popover--enter-done", + "attr__data-placement": "bottom-start", + "attr__style": "position: fixed; top: 95.5747px; left: 317.718px; max-height: 759.425px; max-width: 1190.28px; width: initial;" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__data-floating-ui-portal": "\"attr__id=", + "floating-ui-6\"attr_id": "floating-ui-6" + }, + "href": null, + "nth_child": 16.0, + "nth_of_type": 12.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": 
"button.LemonButton.LemonButton--full-width.LemonButton--status-stealth.LemonButton--tertiary:attr__class=\"LemonButton LemonButton--tertiary LemonButton--status-stealth LemonButton--full-width\"attr__type=\"button\"nth-child=\"16\"nth-of-type=\"15\"text=\"Custom fixed date range\u2026\";div.space-y-px:attr__class=\"space-y-px\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__content:attr__class=\"Popover__content\"nth-child=\"1\"nth-of-type=\"1\";div.Popover__box:attr__class=\"Popover__box\"nth-child=\"1\"nth-of-type=\"1\";div.Popover.Popover--actionable.Popover--enter-done:attr__aria-level=\"0\"attr__class=\"Popover Popover--actionable Popover--enter-done\"attr__data-placement=\"bottom-start\"attr__style=\"position: fixed; top: 95.5747px; left: 317.718px; max-height: 759.425px; max-width: 1190.28px; width: initial;\"nth-child=\"1\"nth-of-type=\"1\";div:attr__data-floating-ui-portal=\"\"attr__id=\"floating-ui-6\"attr_id=\"floating-ui-6\"nth-child=\"16\"nth-of-type=\"12\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-caed-756a-af22-999c0b649d92", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ubrot88ulvd6b8lg", + "$time": 1697799916.27, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", 
+ "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": 
true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": 
"cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "Custom fixed date range\u2026", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + 
"$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:16.465000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:16.286000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": null, + "attr_id": null, + "attributes": { + "attr__clip-rule": "evenodd", + "attr__d": "M12.162 3.12a.25.25 0 0 0-.324 0l-7.25 6.152a.25.25 0 0 0-.088.191v9.787c0 .138.112.25.25.25h4a.25.25 0 0 0 .25-.25v-3.5c0-.966.784-1.75 1.75-1.75h2.5c.966 0 1.75.784 1.75 1.75v3.5c0 .138.112.25.25.25h4a.25.25 0 0 0 .25-.25V9.463a.25.25 0 0 0-.088-.19l-7.25-6.152Zm-1.294-1.143a1.75 1.75 0 0 1 2.264 0l7.25 6.152c.392.332.618.82.618 1.334v9.787A1.75 1.75 0 0 1 19.25 
21h-4a1.75 1.75 0 0 1-1.75-1.75v-3.5a.25.25 0 0 0-.25-.25h-2.5a.25.25 0 0 0-.25.25v3.5A1.75 1.75 0 0 1 8.75 21h-4A1.75 1.75 0 0 1 3 19.25V9.463c0-.514.226-1.002.618-1.334l7.25-6.152Z", + "attr__fill-rule": "evenodd", + "attr__href": "/home" + }, + "href": "/home", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "path", + "text": null + }, + { + "attr_class": ["LemonIcon"], + "attr_id": null, + "attributes": { + "attr__class": "LemonIcon", + "attr__fill": "currentColor", + "attr__viewBox": "0 0 24 24", + "attr__width": "100%", + "attr__xmlns": "http://www.w3.org/2000/svg" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "svg", + "text": null + }, + { + "attr_class": ["LemonButton__icon"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__icon" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "span", + "text": null + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--no-content", + "LemonButton--status-primary", + "LemonButton--tertiary", + "Link", + "ant-tooltip-open" + ], + "attr_id": null, + "attributes": { + "attr__class": "Link LemonButton LemonButton--tertiary LemonButton--status-primary LemonButton--no-content LemonButton--has-icon ant-tooltip-open", + "attr__data-attr": "menu-item-projecthomepage", + "attr__draggable": "true", + "attr__href": "/home" + }, + "href": "/home", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "a", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "ul", + "text": null + }, + { + "attr_class": ["Navbar3000__top"], + "attr_id": null, + "attributes": { + 
"attr__class": "Navbar3000__top" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navbar3000__content"], + "attr_id": null, + "attributes": { + "attr__class": "Navbar3000__content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navbar3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navbar3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "nav", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "path:attr__clip-rule=\"evenodd\"attr__d=\"M12.162 3.12a.25.25 0 0 0-.324 0l-7.25 6.152a.25.25 0 0 0-.088.191v9.787c0 .138.112.25.25.25h4a.25.25 0 0 0 .25-.25v-3.5c0-.966.784-1.75 1.75-1.75h2.5c.966 0 1.75.784 1.75 1.75v3.5c0 .138.112.25.25.25h4a.25.25 0 0 0 .25-.25V9.463a.25.25 0 0 0-.088-.19l-7.25-6.152Zm-1.294-1.143a1.75 1.75 0 0 1 2.264 0l7.25 6.152c.392.332.618.82.618 1.334v9.787A1.75 1.75 0 0 1 19.25 21h-4a1.75 1.75 0 0 1-1.75-1.75v-3.5a.25.25 0 0 0-.25-.25h-2.5a.25.25 0 0 0-.25.25v3.5A1.75 1.75 0 0 1 8.75 21h-4A1.75 1.75 0 0 1 3 
19.25V9.463c0-.514.226-1.002.618-1.334l7.25-6.152Z\"attr__fill-rule=\"evenodd\"attr__href=\"/home\"href=\"/home\"nth-child=\"1\"nth-of-type=\"1\";svg.LemonIcon:attr__class=\"LemonIcon\"attr__fill=\"currentColor\"attr__viewBox=\"0 0 24 24\"attr__width=\"100%\"attr__xmlns=\"http://www.w3.org/2000/svg\"nth-child=\"1\"nth-of-type=\"1\";span.LemonButton__icon:attr__class=\"LemonButton__icon\"nth-child=\"1\"nth-of-type=\"1\";a.LemonButton.LemonButton--has-icon.LemonButton--no-content.LemonButton--status-primary.LemonButton--tertiary.Link.ant-tooltip-open:attr__class=\"Link LemonButton LemonButton--tertiary LemonButton--status-primary LemonButton--no-content LemonButton--has-icon ant-tooltip-open\"attr__data-attr=\"menu-item-projecthomepage\"attr__draggable=\"true\"attr__href=\"/home\"href=\"/home\"nth-child=\"1\"nth-of-type=\"1\";li:nth-child=\"1\"nth-of-type=\"1\";ul:nth-child=\"1\"nth-of-type=\"1\";div.Navbar3000__top:attr__class=\"Navbar3000__top\"nth-child=\"1\"nth-of-type=\"1\";div.Navbar3000__content:attr__class=\"Navbar3000__content\"nth-child=\"1\"nth-of-type=\"1\";nav.Navbar3000:attr__class=\"Navbar3000\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-d38c-7d90-9fc6-230883fb4c9d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "az8i1ijfsp7dcu8f", + "$time": 1697799918.476, + 
"distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, 
+ "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": 
"hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + 
"billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:19.477000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:18.486000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-d395-72e8-a051-8b267d8e7066", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + 
"$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "wqe2gfkuym7xcp8o", + "$time": 1697799918.485, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + 
"surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + 
"$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + 
"projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "product-specific-onboarding", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:19.477000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + 
"$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:18.495000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4cc2-d398-7794-933f-fa78a242c2cb", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "pbwgsdeb7kudbck5", + "$time": 1697799918.489, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + 
"high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + 
"$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + 
"percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "title": "Homepage \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:19.477000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + 
"$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:18.498000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--has-side-icon", + "LemonButton--primary", + "LemonButton--small", + "LemonButton--status-primary", + "Link" + ], + "attr_id": null, + "attributes": { + "attr__class": "Link LemonButton LemonButton--primary LemonButton--status-primary LemonButton--small LemonButton--has-icon LemonButton--has-side-icon", + "attr__data-attr": "saved-insights-new-insight-button", + "attr__draggable": "true", + "attr__href": "/insights/new" + }, + "href": "/insights/new", + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "a", + "text": "New insight" + }, + { + "attr_class": ["LemonButtonWithSideAction", "LemonButtonWithSideAction--small"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButtonWithSideAction LemonButtonWithSideAction--small" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Breadcrumbs3000__actions"], + "attr_id": null, + "attributes": { + "attr__class": "Breadcrumbs3000__actions" + }, + "href": null, + "nth_child": 7.0, + "nth_of_type": 6.0, + "order": 2.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Breadcrumbs3000"], + "attr_id": 
null, + "attributes": { + "attr__class": "Breadcrumbs3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "a.LemonButton.LemonButton--has-icon.LemonButton--has-side-icon.LemonButton--primary.LemonButton--small.LemonButton--status-primary.Link:attr__class=\"Link LemonButton LemonButton--primary LemonButton--status-primary LemonButton--small LemonButton--has-icon LemonButton--has-side-icon\"attr__data-attr=\"saved-insights-new-insight-button\"attr__draggable=\"true\"attr__href=\"/insights/new\"href=\"/insights/new\"nth-child=\"1\"nth-of-type=\"1\"text=\"New insight\";div.LemonButtonWithSideAction.LemonButtonWithSideAction--small:attr__class=\"LemonButtonWithSideAction 
LemonButtonWithSideAction--small\"nth-child=\"2\"nth-of-type=\"1\";div.Breadcrumbs3000__actions:attr__class=\"Breadcrumbs3000__actions\"nth-child=\"7\"nth-of-type=\"6\";div.Breadcrumbs3000:attr__class=\"Breadcrumbs3000\"nth-child=\"1\"nth-of-type=\"1\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-d79c-7232-afc4-a7c7814d2b09", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$host": "localhost:8000", + "$pathname": "/home", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ak2ihfmu0jx88a2d", + "$time": 1697799919.516, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", 
+ "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + 
"$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + 
"current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "$el_text": "New insight", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/home", + "$pathname": "/home", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/home", + "$initial_pathname": "/home", + 
"$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:19.533000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-d81e-7875-a2ac-8846ffd710c1", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ccaf8o48a027gcdt", + "$time": 1697799919.646, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + 
"funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + 
"$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": 
"cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "funnels-cue-opt-out-7301", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": 
"018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:19.663000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4cc2-d835-7d31-af5a-5294ec922967", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + 
"$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "fnd160khukj3ji6d", + "$time": 1697799919.669, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + 
"$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + 
"$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + 
"usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$feature_flag": "smoothing-interval", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:19.686000+00:00", + "uuid": null 
+ }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4cc2-d922-77e2-bc50-8d34466436e3", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "p3n4m8xa3afpo3tx", + "$time": 1697799919.906, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + 
"artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + 
"$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + 
"current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "title": "Unnamed \u2022 Insights \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + 
"$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:19.923000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4cc2-d937-7dc2-a69b-339daae7720d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "b1ukfohazj37608n", + "$time": 1697799919.927, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + 
"query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + 
"$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + 
"projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "query": { + "kind": "TrendsQuery", + "filterTestAccounts": false, + "series": [ + { + "kind": "EventsNode", + "event": "$pageview", + "name": "$pageview", + "math": "total" + } + ], + "interval": "day", + "trendsFilter": { + "display": "ActionsLineGraph" + } + }, + "duration": 275.5999999642372, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:19.944000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight created", + "id": "018b4cc2-d9b7-7c1f-a9e9-94c564b7a7de", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "wj66f9ygnvcad5wh", + "$time": 
1697799920.055, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + 
"$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + 
"$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": 
"2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "insight": "TRENDS", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:20.072000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["LemonButton__content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__content" + }, + "href": null, + "nth_child": 
2.0, + "nth_of_type": 2.0, + "order": 0.0, + "tag_name": "span", + "text": "Last 7 days" + }, + { + "attr_class": [ + "LemonButton", + "LemonButton--has-icon", + "LemonButton--has-side-icon", + "LemonButton--secondary", + "LemonButton--small", + "LemonButton--status-stealth" + ], + "attr_id": "daterange_selector", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-haspopup": "true", + "attr__class": "LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon LemonButton--has-side-icon", + "attr__data-attr": "date-filter", + "attr__id": "daterange_selector", + "attr__type": "button" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "button", + "text": "Last 7 days" + }, + { + "attr_class": ["flex", "items-center", "space-x-2", "text-sm"], + "attr_id": null, + "attributes": { + "attr__class": "space-x-2 flex items-center text-sm" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "span", + "text": null + }, + { + "attr_class": ["flex", "flex-wrap", "gap-x-2", "gap-y-2", "items-center", "my-2"], + "attr_id": null, + "attributes": { + "attr__class": "flex items-center gap-x-2 flex-wrap my-2 gap-y-2" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "flex-wrap", "items-center", "justify-between"], + "attr_id": null, + "attributes": { + "attr__class": "flex justify-between items-center flex-wrap", + "attr__data-attr": "insight-filters" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card-head-title"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head-title" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": 
["ant-card-head-wrapper"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head-wrapper" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card-head"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card", "ant-card-bordered", "insights-graph-container"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card ant-card-bordered insights-graph-container", + "attr__data-attr": "insights-graph" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insights-container"], + "attr_id": null, + "attributes": { + "attr__class": "insights-container", + "attr__data-attr": "insight-view" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insight-wrapper"], + "attr_id": null, + "attributes": { + "attr__class": "insight-wrapper" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 4.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insights-page"], + "attr_id": null, + "attributes": { + "attr__class": "insights-page" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 12.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 13.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": 
["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 14.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 15.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 16.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "span.LemonButton__content:attr__class=\"LemonButton__content\"nth-child=\"2\"nth-of-type=\"2\"text=\"Last 7 days\";button.LemonButton.LemonButton--has-icon.LemonButton--has-side-icon.LemonButton--secondary.LemonButton--small.LemonButton--status-stealth:attr__aria-disabled=\"false\"attr__aria-haspopup=\"true\"attr__class=\"LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--has-icon LemonButton--has-side-icon\"attr__data-attr=\"date-filter\"attr__id=\"daterange_selector\"attr__type=\"button\"attr_id=\"daterange_selector\"nth-child=\"1\"nth-of-type=\"1\"text=\"Last 7 days\";span.flex.items-center.space-x-2.text-sm:attr__class=\"space-x-2 flex items-center text-sm\"nth-child=\"1\"nth-of-type=\"1\";div.flex.flex-wrap.gap-x-2.gap-y-2.items-center.my-2:attr__class=\"flex items-center gap-x-2 flex-wrap my-2 gap-y-2\"nth-child=\"1\"nth-of-type=\"1\";div.flex.flex-wrap.items-center.justify-between:attr__class=\"flex justify-between items-center 
flex-wrap\"attr__data-attr=\"insight-filters\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head-title:attr__class=\"ant-card-head-title\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head-wrapper:attr__class=\"ant-card-head-wrapper\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head:attr__class=\"ant-card-head\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card.ant-card-bordered.insights-graph-container:attr__class=\"ant-card ant-card-bordered insights-graph-container\"attr__data-attr=\"insights-graph\"nth-child=\"1\"nth-of-type=\"1\";div.insights-container:attr__class=\"insights-container\"attr__data-attr=\"insight-view\"nth-child=\"2\"nth-of-type=\"2\";div.insight-wrapper:attr__class=\"insight-wrapper\"nth-child=\"4\"nth-of-type=\"4\";div.insights-page:attr__class=\"insights-page\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-e0d4-72f0-8d6d-5b301c3790d8", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "knfewpc9dbfjykww", + "$time": 1697799921.876, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + 
"$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + 
"$el_text": "Last 7 days", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:22.490000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:21.893000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["LemonButton__content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonButton__content" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 0.0, + "tag_name": "span", + "text": "Last 7 days" + }, + { + "attr_class": [ + "LemonButton", + 
"LemonButton--active", + "LemonButton--has-icon", + "LemonButton--has-side-icon", + "LemonButton--secondary", + "LemonButton--small", + "LemonButton--status-stealth" + ], + "attr_id": "daterange_selector", + "attributes": { + "attr__aria-disabled": "false", + "attr__aria-haspopup": "true", + "attr__class": "LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--active LemonButton--has-icon LemonButton--has-side-icon", + "attr__data-attr": "date-filter", + "attr__id": "daterange_selector", + "attr__type": "button" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 1.0, + "tag_name": "button", + "text": "Last 7 days" + }, + { + "attr_class": ["flex", "items-center", "space-x-2", "text-sm"], + "attr_id": null, + "attributes": { + "attr__class": "space-x-2 flex items-center text-sm" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "span", + "text": null + }, + { + "attr_class": ["flex", "flex-wrap", "gap-x-2", "gap-y-2", "items-center", "my-2"], + "attr_id": null, + "attributes": { + "attr__class": "flex items-center gap-x-2 flex-wrap my-2 gap-y-2" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["flex", "flex-wrap", "items-center", "justify-between"], + "attr_id": null, + "attributes": { + "attr__class": "flex justify-between items-center flex-wrap", + "attr__data-attr": "insight-filters" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card-head-title"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head-title" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card-head-wrapper"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head-wrapper" + }, + 
"href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card-head"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card-head" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["ant-card", "ant-card-bordered", "insights-graph-container"], + "attr_id": null, + "attributes": { + "attr__class": "ant-card ant-card-bordered insights-graph-container", + "attr__data-attr": "insights-graph" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insights-container"], + "attr_id": null, + "attributes": { + "attr__class": "insights-container", + "attr__data-attr": "insight-view" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 9.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insight-wrapper"], + "attr_id": null, + "attributes": { + "attr__class": "insight-wrapper" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 4.0, + "order": 10.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insights-page"], + "attr_id": null, + "attributes": { + "attr__class": "insights-page" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 11.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 12.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 13.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + 
"nth_child": 1.0, + "nth_of_type": 1.0, + "order": 14.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 15.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + "attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 16.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "span.LemonButton__content:attr__class=\"LemonButton__content\"nth-child=\"2\"nth-of-type=\"2\"text=\"Last 7 days\";button.LemonButton.LemonButton--active.LemonButton--has-icon.LemonButton--has-side-icon.LemonButton--secondary.LemonButton--small.LemonButton--status-stealth:attr__aria-disabled=\"false\"attr__aria-haspopup=\"true\"attr__class=\"LemonButton LemonButton--secondary LemonButton--status-stealth LemonButton--small LemonButton--active LemonButton--has-icon LemonButton--has-side-icon\"attr__data-attr=\"date-filter\"attr__id=\"daterange_selector\"attr__type=\"button\"attr_id=\"daterange_selector\"nth-child=\"1\"nth-of-type=\"1\"text=\"Last 7 days\";span.flex.items-center.space-x-2.text-sm:attr__class=\"space-x-2 flex items-center text-sm\"nth-child=\"1\"nth-of-type=\"1\";div.flex.flex-wrap.gap-x-2.gap-y-2.items-center.my-2:attr__class=\"flex items-center gap-x-2 flex-wrap my-2 gap-y-2\"nth-child=\"1\"nth-of-type=\"1\";div.flex.flex-wrap.items-center.justify-between:attr__class=\"flex justify-between items-center 
flex-wrap\"attr__data-attr=\"insight-filters\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head-title:attr__class=\"ant-card-head-title\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head-wrapper:attr__class=\"ant-card-head-wrapper\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card-head:attr__class=\"ant-card-head\"nth-child=\"1\"nth-of-type=\"1\";div.ant-card.ant-card-bordered.insights-graph-container:attr__class=\"ant-card ant-card-bordered insights-graph-container\"attr__data-attr=\"insights-graph\"nth-child=\"1\"nth-of-type=\"1\";div.insights-container:attr__class=\"insights-container\"attr__data-attr=\"insight-view\"nth-child=\"2\"nth-of-type=\"2\";div.insight-wrapper:attr__class=\"insight-wrapper\"nth-child=\"4\"nth-of-type=\"4\";div.insights-page:attr__class=\"insights-page\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-e3eb-739f-9033-12ea1e9c896e", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "33gzljaml77fxfme", + "$time": 1697799922.668, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + 
"$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + 
"$el_text": "Last 7 days", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:25.504000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:22.675000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [ + { + "attr_class": ["LemonTabs__tab-content"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs__tab-content" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 0.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["LemonTabs__tab"], + 
"attr_id": null, + "attributes": { + "attr__aria-selected": "false", + "attr__class": "LemonTabs__tab", + "attr__role": "tab", + "attr__tabindex": "0" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 1.0, + "tag_name": "li", + "text": null + }, + { + "attr_class": ["LemonTabs__bar"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs__bar", + "attr__role": "tablist" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 2.0, + "tag_name": "ul", + "text": null + }, + { + "attr_class": ["LemonTabs"], + "attr_id": null, + "attributes": { + "attr__class": "LemonTabs", + "attr__style": "--lemon-tabs-slider-width: 44.171875px; --lemon-tabs-slider-offset: 0px;" + }, + "href": null, + "nth_child": 3.0, + "nth_of_type": 3.0, + "order": 3.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["insights-page"], + "attr_id": null, + "attributes": { + "attr__class": "insights-page" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 4.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["Navigation3000__scene"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000__scene" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 2.0, + "order": 5.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": null, + "attributes": {}, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 6.0, + "tag_name": "main", + "text": null + }, + { + "attr_class": ["Navigation3000"], + "attr_id": null, + "attributes": { + "attr__class": "Navigation3000" + }, + "href": null, + "nth_child": 1.0, + "nth_of_type": 1.0, + "order": 7.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": null, + "attr_id": "root", + "attributes": { + "attr__id": "root" + }, + "href": null, + "nth_child": 4.0, + "nth_of_type": 1.0, + "order": 8.0, + "tag_name": "div", + "text": null + }, + { + "attr_class": ["posthog-3000"], + "attr_id": null, + 
"attributes": { + "attr__class": "posthog-3000", + "attr__theme": "light" + }, + "href": null, + "nth_child": 2.0, + "nth_of_type": 1.0, + "order": 9.0, + "tag_name": "body", + "text": null + } + ], + "elements_chain": "div.LemonTabs__tab-content:attr__class=\"LemonTabs__tab-content\"nth-child=\"1\"nth-of-type=\"1\";li.LemonTabs__tab:attr__aria-selected=\"false\"attr__class=\"LemonTabs__tab\"attr__role=\"tab\"attr__tabindex=\"0\"nth-child=\"2\"nth-of-type=\"2\";ul.LemonTabs__bar:attr__class=\"LemonTabs__bar\"attr__role=\"tablist\"nth-child=\"1\"nth-of-type=\"1\";div.LemonTabs:attr__class=\"LemonTabs\"attr__style=\"--lemon-tabs-slider-width: 44.171875px; --lemon-tabs-slider-offset: 0px;\"nth-child=\"3\"nth-of-type=\"3\";div.insights-page:attr__class=\"insights-page\"nth-child=\"1\"nth-of-type=\"1\";div.Navigation3000__scene:attr__class=\"Navigation3000__scene\"nth-child=\"2\"nth-of-type=\"2\";main:nth-child=\"4\"nth-of-type=\"1\";div.Navigation3000:attr__class=\"Navigation3000\"nth-child=\"1\"nth-of-type=\"1\";div:attr__id=\"root\"attr_id=\"root\"nth-child=\"4\"nth-of-type=\"1\";body.posthog-3000:attr__class=\"posthog-3000\"attr__theme=\"light\"nth-child=\"2\"nth-of-type=\"1\"", + "event": "$autocapture", + "id": "018b4cc2-f490-7227-8d35-65880c75cac8", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "dwrvit8pfft8n550", + "$time": 1697799926.928, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + 
"$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$event_type": "click", + "$ce_version": 1, + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:28.530000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:26.936000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4cc2-f5b9-730b-9751-a1d1eb4ecce3", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + 
"$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "qyw1jkyxn1okzklx", + "$time": 1697799927.226, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + 
], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, 
+ "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + 
"current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "query": { + "kind": "FunnelsQuery", + "series": [ + { + "kind": "EventsNode", + "name": "$pageview", + "event": "$pageview" + } + ], + "funnelsFilter": { + "funnel_viz_type": "steps" + } + }, + "duration": 237.10000002384186, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:28.530000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], 
+ "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:27.233000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight viewed", + "id": "018b4cc2-f68d-7712-8860-14242f72c761", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ing4xyzshu1yped4", + "$time": 1697799927.437, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + 
"generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + 
"$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + 
"projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "filters_count": 0, + "events_count": 1, + "actions_count": 0, + "funnel_viz_type": "steps", + "properties_global": [], + "properties_global_custom_count": 0, + "properties_local": [], + "properties_local_custom_count": 0, + "properties_all": [], + "aggregating_by_groups": false, + "breakdown_by_groups": false, + "using_groups": false, + "used_cohort_filter_ids": [], + "insight": "FUNNELS", + "report_delay": 0, + "is_first_component_load": true, + "from_dashboard": false, + "total_event_actions_count": 1, + "total_event_action_filters_count": 0, + "mode": "edit", + "viewer_is_creator": false, + "description_length": 0, + "tags_count": 0, + "changed_insight": "TRENDS", + "changed_entity_type": [], + "changed_interval": "day", + "changed_hidden_legend_keys": [], + "changed_funnel_viz_type": [], + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": 
"018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:28.530000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:27.445000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "insight analyzed", + "id": "018b4cc3-1da3-79bc-84eb-ba4883cd3036", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + 
"$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "679ovhx0nx9fbz6c", + "$time": 1697799937.443, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + 
"$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + 
"$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + 
"current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "filters_count": 0, + "events_count": 1, + "actions_count": 0, + "funnel_viz_type": "steps", + "properties_global": [], + "properties_global_custom_count": 0, + "properties_local": [], + "properties_local_custom_count": 0, + "properties_all": [], + "aggregating_by_groups": false, + "breakdown_by_groups": false, + "using_groups": false, + "used_cohort_filter_ids": [], + "insight": "FUNNELS", + "report_delay": 10, + "is_first_component_load": false, + "from_dashboard": false, + "total_event_actions_count": 1, + "total_event_action_filters_count": 0, + "mode": "edit", + "viewer_is_creator": false, + "description_length": 0, + "tags_count": 0, + "changed_insight": "TRENDS", + "changed_entity_type": [], + "changed_interval": "day", + "changed_hidden_legend_keys": [], + "changed_funnel_viz_type": [], + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + 
"$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:37.539000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:37.451000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageleave", + "id": "018b4cc3-513e-7fef-ab2a-04ab143a4f1c", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/insights/new", + "$host": "localhost:8000", + "$pathname": "/insights/new", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "l3xm3x64ia727uwa", + "$time": 1697799950.655, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + 
"billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + 
"$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + 
"customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4cc2-908b-7989-a77a-ae1396eef1de", + "$window_id": "018b4cc2-908b-7989-a77a-ae1465fa2118", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/insights/new", + "$pathname": "/insights/new", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/insights/new", + "$initial_pathname": "/insights/new", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:50.657000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:50.674000+00:00", + "uuid": null + } + ], + "recording_duration_s": 50.0, + "sessionId": "018b4cc2-908b-7989-a77a-ae1396eef1de" + }, + { + "events": [ + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$set", + "id": "018b4ca3-1d07-7195-8202-c8a0a4015ef4", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "l2e0vvj7g4kj2vyc", + "$time": 1697797840.135, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + 
"ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$set": { + "email": "test@posthog.com", + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + 
"$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:30:40.150000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4ca3-1d08-72d6-8912-1c884aeb47ae", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": 
"wla12x9igfbgty32", + "$time": 1697797840.136, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + 
"$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": 
true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + 
"free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "project", + "$group_key": "018a92f8-b602-0000-75de-4b9073693531", + "$group_set": { + "id": 1, + "uuid": "018a92f8-b602-0000-75de-4b9073693531", + "name": "Hedgebox", + "ingested_event": true, + "is_demo": false, + "timezone": "UTC", + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + 
"$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": 
"018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T10:30:40.151000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4ca3-1d09-7c1f-b227-cf790742dcb0", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "v9gjsx4xig703btp", + 
"$time": 1697797840.137, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + 
"$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + 
"$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": 
"2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "$group_type": "organization", + "$group_key": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_set": { + "id": "018a92f8-afff-0000-efec-ca77de39e384", + "name": "Hedgebox Inc.", + "slug": "hedgebox-inc", + "created_at": "2023-09-14T09:14:46.145060Z", + "available_features": [ + "zapier", + "slack_integration", + "microsoft_teams_integration", + "discord_integration", + "apps", + "app_metrics", + "boolean_flags", + "multivariate_flags", + "persist_flags_cross_authentication", + "feature_flag_payloads", + "multiple_release_conditions", + "release_condition_overrides", + "targeting_by_group", + "local_evaluation_and_bootstrapping", + "flag_usage_stats", + "experimentation", + "group_experiments", + "funnel_experiments", + "secondary_metrics", + "statistical_analysis", + "console_logs", + "recordings_playlists", + "recordings_performance", + "recordings_file_export", + "group_analytics", + "dashboards", + "funnels", + "graphs_trends", + "paths", + "subscriptions", + "paths_advanced", + "dashboard_permissioning", + "dashboard_collaboration", + "ingestion_taxonomy", + "correlation_analysis", + "tagging", + "behavioral_cohort_filtering", + "tracked_users", + "data_retention", + "team_members", + "organizations_projects", + "api_access", + "project_based_permissioning", + "social_sso", + "sso_enforcement", + "white_labelling", + "community_support", + "dedicated_support", + "email_support", + "terms_and_conditions", + "security_assessment" + ], + "instance_tag": "none" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.152000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-1d5d-7d58-ac71-0a9e143afc1e", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "vhe47zdv0zexeeuk", + "$time": 1697797840.222, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "posthog-3000", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.237000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-1d5e-7346-afdf-2400e5e7f7d6", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "96opt7bntz65vkew", + "$time": 1697797840.222, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "enable-prompts", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.237000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-1d63-70c3-bc8e-32652a5076c2", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "it0bs3zaxzky8511", + "$time": 1697797840.228, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "notebooks", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.243000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$groupidentify", + "id": "018b4ca3-1db7-7917-8e7e-ce4d6db6aac0", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "b6ktfl60jskki58x", + "$time": 1697797840.312, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$group_type": "instance", + "$group_key": "http://localhost:8000", + "$group_set": { + "site_url": "http://localhost:8000" + }, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.327000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-1dcd-712b-befa-52f257d2144b", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "kwzuadx4g5n5ms1d", + "$time": 1697797840.334, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "cs-dashboards", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.350000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca3-1dd4-79b4-9077-540e9c915f51", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "s04xofpsygbioozp", + "$time": 1697797840.344, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "title": "Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.359000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4ca3-1f42-700e-8bef-a6ad468cff87", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "cksjqjsjm4csoovv", + "$time": 1697797840.706, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"query": { + "kind": "HogQLQuery", + "query": "select id, groupArray(pdi.distinct_id) as distinct_ids, properties, is_identified, created_at from persons where pdi.distinct_id={distinct_id} group by id, properties, is_identified, created_at", + "values": { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI" + } + }, + "duration": 516.8999999761581, + "clickhouse_sql": "SELECT persons.id, groupArray(persons__pdi.distinct_id) AS distinct_ids, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_0)s) FROM (SELECT person.id, person.properties AS properties, person.is_identified AS is_identified, person.created_at AS created_at FROM person WHERE and(equals(person.team_id, 1), ifNull(in(tuple(person.id, person.version), (SELECT person.id, max(person.version) AS version FROM person WHERE equals(person.team_id, 1) GROUP BY person.id HAVING ifNull(equals(argMax(person.is_deleted, person.version), 0), 0))), 0)) SETTINGS optimize_aggregation_in_order=1) AS persons INNER JOIN (SELECT argMax(person_distinct_id2.person_id, person_distinct_id2.version) AS person_id, person_distinct_id2.distinct_id AS distinct_id FROM person_distinct_id2 WHERE equals(person_distinct_id2.team_id, 1) GROUP BY person_distinct_id2.distinct_id HAVING ifNull(equals(argMax(person_distinct_id2.is_deleted, person_distinct_id2.version), 0), 0)) AS persons__pdi ON equals(persons.id, persons__pdi.person_id) WHERE ifNull(equals(persons__pdi.distinct_id, %(hogql_val_1)s), 0) GROUP BY persons.id, persons.properties, persons.is_identified, toTimeZone(persons.created_at, %(hogql_val_2)s) LIMIT 100 SETTINGS readonly=2, max_execution_time=60, allow_experimental_object_type=1", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.721000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-1f4d-7bfd-a091-0f8858fcc9ca", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "c1x6pz53d6fb5uqp", + "$time": 1697797840.718, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "debug-react-renders", + "$feature_flag_response": false, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.733000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-204e-7df2-a8b4-fe15ee87036d", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "37mvoi5i4r6lfyx3", + "$time": 1697797840.976, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "hogql-insights", + "$feature_flag_response": false, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:40.991000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "person viewed", + "id": "018b4ca3-2139-7a8e-ac11-02e2850ecabb", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "fgp2uq9geh5d7kkq", + "$time": 1697797841.209, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"properties_count": 40, + "has_email": true, + "has_name": false, + "custom_properties_count": 15, + "posthog_properties_count": 25, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:41.224000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "query completed", + "id": "018b4ca3-22e7-7fee-803e-8461e96d4eeb", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "w3rikko1nqif5eb6", + "$time": 1697797841.64, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"query": { + "kind": "SessionsTimelineQuery", + "after": "2021-01-01T18:00:00Z", + "before": "2024-01-01T06:00:00Z", + "personId": "018a92fd-a1c3-0000-4144-fb39888c298e" + }, + "duration": 664.7000000476837, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjNkMzZiZjU3LTFkYjQtNDJlZi1iNzhiLWUyZDNkNGViOWM5NSJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiI1MzE0ZWI2MS1lYTA2LTQyOGQtYWEzZi0zMDA4OWJjZmUxYjgifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImUwMjE1NDU5LWY0YTgtNGJjNy04NDhmLTYxODE1NzY1NzlmNyJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:41.655000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$feature_flag_called", + "id": "018b4ca3-2350-7671-8383-8c248f826729", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "h2xh5dfq47fd03xz", + "$time": 1697797841.745, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"$feature_flag": "product-specific-onboarding", + "$feature_flag_response": true, + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:41.760000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4ca3-2368-78f4-88db-9fdc90e949de", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "m2opmkybj5tuw159", + "$time": 1697797841.769, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": 
true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + 
"$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + 
"is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": 
"phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "title": "test@posthog.com \u2022 Persons \u2022 PostHog", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:41.784000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "notebook content changed", + "id": "018b4ca3-236a-78d6-ba7b-155173c88abe", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "3tkk1xg4kr4es46n", + "$time": 1697797841.77, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + 
"$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + 
"$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": 
"krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + 
"token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T10:30:43.133000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T10:30:41.785000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$opt_in", + "id": "018b4cc2-9079-7685-b127-207ee22b84d4", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "kegpygvidsjwpa52", + "$time": 1697799901.306, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + 
"show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + 
"$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + 
"usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4cc2-9078-7092-8d1a-e6d2368f35a3", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:01.307000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": 
"Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.309000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageview", + "id": "018b4cc2-907c-7bc8-913f-5b1887950b73", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$host": "localhost:8000", + "$pathname": "/", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "l26gbkzemq3muzti", + "$time": 1697799901.308, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + "ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + 
"feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + "$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, 
+ "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + "current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + 
"current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "title": "PostHog", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4cc2-9078-7092-8d1a-e6d2368f35a3", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": "http://localhost:8000/", + "$pathname": "/", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": "http://localhost:8000/", + "$initial_pathname": "/", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + 
"$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:01.308000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": "2023-10-20T11:05:01.312000+00:00", + "uuid": null + }, + { + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "elements": [], + "elements_chain": null, + "event": "$pageleave", + "id": "018b4cc3-527c-7bc2-b270-1e09559a1460", + "person": null, + "properties": { + "$os": "Mac OS X", + "$os_version": "10.15.7", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$host": "localhost:8000", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$browser_language": "en-GB", + "$screen_height": 982, + "$screen_width": 1512, + "$viewport_height": 859, + "$viewport_width": 1512, + "$lib": "web", + "$lib_version": "1.84.0", + "$insert_id": "ubvpv3ao6iaj9wk8", + "$time": 1697799950.972, + "distinct_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$device_id": "018a92fd-7b25-7770-a050-577e94fd148c", + "$console_log_recording_enabled_server_side": true, + "$session_recording_recorder_version_server_side": "v2", + "$autocapture_disabled_server_side": false, + "$active_feature_flags": [ + "signup-page-4.0", + "cmd-k-search", + "posthog-3000", + "funnels-cue-opt-out-7301", + "retention-breakdown", + "smoothing-interval", + "billing-limit", + "kafka-inspector", + "historical-exports-v2", + "person-on-events-enabled", + "region-select", + 
"ingestion-warnings-enabled", + "session-reset-on-load", + "recordings-on-feature-flags", + "auto-rollback-feature-flags", + "onboarding-v2-demo", + "feature-flag-rollout-ux", + "role-based-access", + "query_running_time", + "query-timings", + "recording-debugging", + "enable-prompts", + "feedback-scene", + "early-access-feature", + "early-access-feature-site-button", + "hedgehog-mode-debug", + "auto-redirect", + "session-recording-blob-replay", + "surveys", + "generic-signup-benefits", + "surveys-positions", + "web-analytics", + "high-frequency-batch-exports", + "exception-autocapture", + "data-warehouse", + "data-warehouse-views", + "ff-dashboard-templates", + "show-product-intro-existing-products", + "artificial-hog", + "cs-dashboards", + "product-specific-onboarding", + "redirect-signups-to-instance", + "apps-and-exports-ui", + "survey-nps-results", + "session-recording-allow-v1-snapshots", + "session-replay-cors-proxy", + "webhooks-denylist", + "surveys-site-app-deprecation", + "surveys-multiple-questions", + "surveys-results-visualizations", + "console-recording-search", + "persons-hogql-query", + "notebooks" + ], + "$feature/file-previews": false, + "$feature/signup-page-4.0": "control", + "$feature/hogql-insights": false, + "$feature/cmd-k-search": true, + "$feature/posthog-3000": true, + "$feature/funnels-cue-opt-out-7301": true, + "$feature/retention-breakdown": true, + "$feature/smoothing-interval": true, + "$feature/billing-limit": true, + "$feature/kafka-inspector": true, + "$feature/historical-exports-v2": true, + "$feature/person-on-events-enabled": true, + "$feature/region-select": true, + "$feature/ingestion-warnings-enabled": true, + "$feature/session-reset-on-load": true, + "$feature/recordings-on-feature-flags": true, + "$feature/auto-rollback-feature-flags": true, + "$feature/onboarding-v2-demo": true, + "$feature/feature-flag-rollout-ux": true, + "$feature/role-based-access": true, + "$feature/query_running_time": true, + 
"$feature/query-timings": true, + "$feature/recording-debugging": true, + "$feature/enable-prompts": true, + "$feature/feedback-scene": true, + "$feature/early-access-feature": true, + "$feature/early-access-feature-site-button": true, + "$feature/hedgehog-mode-debug": true, + "$feature/auto-redirect": true, + "$feature/session-recording-blob-replay": true, + "$feature/surveys": true, + "$feature/generic-signup-benefits": true, + "$feature/surveys-positions": true, + "$feature/web-analytics": true, + "$feature/high-frequency-batch-exports": true, + "$feature/exception-autocapture": true, + "$feature/data-warehouse": true, + "$feature/data-warehouse-views": true, + "$feature/ff-dashboard-templates": true, + "$feature/show-product-intro-existing-products": true, + "$feature/artificial-hog": true, + "$feature/cs-dashboards": true, + "$feature/product-specific-onboarding": true, + "$feature/redirect-signups-to-instance": true, + "$feature/apps-and-exports-ui": true, + "$feature/survey-nps-results": true, + "$feature/session-recording-allow-v1-snapshots": true, + "$feature/session-replay-cors-proxy": true, + "$feature/webhooks-denylist": true, + "$feature/surveys-site-app-deprecation": true, + "$feature/surveys-multiple-questions": true, + "$feature/surveys-results-visualizations": true, + "$feature/console-recording-search": true, + "$feature/persons-hogql-query": true, + "$feature/notebooks": true, + "$feature_flag_payloads": {}, + "realm": "hosted-clickhouse", + "email_service_available": false, + "slack_service_available": false, + "$user_id": "krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "is_demo_project": false, + "$groups": { + "project": "018a92f8-b602-0000-75de-4b9073693531", + "organization": "018a92f8-afff-0000-efec-ca77de39e384", + "customer": "cus_OdSgxxWtHOtq9g", + "instance": "http://localhost:8000" + }, + "has_billing_plan": true, + "customer_deactivated": false, + "current_total_amount_usd": "0.00", + "percentage_usage.product_analytics": 0, + 
"current_amount_usd.product_analytics": "0.00", + "unit_amount_usd.product_analytics": null, + "usage_limit.product_analytics": null, + "current_usage.product_analytics": 3729, + "projected_usage.product_analytics": 0, + "free_allocation.product_analytics": 0, + "percentage_usage.session_replay": 0, + "current_amount_usd.session_replay": "0.00", + "unit_amount_usd.session_replay": null, + "usage_limit.session_replay": null, + "current_usage.session_replay": 254, + "projected_usage.session_replay": 0, + "free_allocation.session_replay": 0, + "percentage_usage.feature_flags": 0, + "current_amount_usd.feature_flags": "0.00", + "unit_amount_usd.feature_flags": null, + "usage_limit.feature_flags": null, + "current_usage.feature_flags": 0, + "projected_usage.feature_flags": 0, + "free_allocation.feature_flags": 0, + "percentage_usage.integrations": 0, + "current_amount_usd.integrations": null, + "unit_amount_usd.integrations": null, + "usage_limit.integrations": 0, + "current_usage.integrations": 0, + "projected_usage.integrations": 0, + "free_allocation.integrations": 0, + "percentage_usage.platform_and_support": 0, + "current_amount_usd.platform_and_support": null, + "unit_amount_usd.platform_and_support": null, + "usage_limit.platform_and_support": 0, + "current_usage.platform_and_support": 0, + "projected_usage.platform_and_support": 0, + "free_allocation.platform_and_support": 0, + "billing_period_start": "2023-10-14T09:22:33.000Z", + "billing_period_end": "2023-11-14T09:22:33.000Z", + "$referrer": "$direct", + "$referring_domain": "$direct", + "token": "phc_OWGI2wKbfi7rWDZT9wkl8uGcfe0wOGCvflLEZSMiaT0", + "$session_id": "018b4ca3-1d07-7195-8202-c8a103a9325a", + "$window_id": "018b4ca3-1d07-7195-8202-c8a2b00ad68a", + "$ip": "127.0.0.1", + "$set": { + "$os": "Mac OS X", + "$browser": "Chrome", + "$device_type": "Desktop", + "$current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$browser_version": 117, + "$referrer": "$direct", + "$referring_domain": "$direct" + }, + "$set_once": { + "$initial_os": "Mac OS X", + "$initial_browser": "Chrome", + "$initial_device_type": "Desktop", + "$initial_current_url": 
"http://localhost:8000/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI#activeTab=feed&state=eyJ0eXBlIjoiZG9jIiwiY29udGVudCI6W3sidHlwZSI6ImhlYWRpbmciLCJhdHRycyI6eyJsZXZlbCI6MX0sImNvbnRlbnQiOlt7InR5cGUiOiJ0ZXh0IiwidGV4dCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkifV19LHsidHlwZSI6InBoLXBlcnNvbi1mZWVkIiwiYXR0cnMiOnsiaGVpZ2h0IjpudWxsLCJ0aXRsZSI6bnVsbCwibm9kZUlkIjoiNmQ0ODUwNjYtZWM5OS00ODNkLThiOTgtNGQ4YTJkYzljYzRiIiwiaWQiOiJrclJmZ29wc1V5MVR3TlUxeWhTRExxTkRUZDZONXRNWnJIbG04bkxKSURJIiwiX19pbml0IjpudWxsLCJjaGlsZHJlbiI6W3sidHlwZSI6InBoLXBlcnNvbiIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6IjE0NDk2Y2ExLWUyNTUtNDlkMS1hM2IzLWU4ZjA1NzhhYjQ5YiJ9fSx7InR5cGUiOiJwaC1tYXAiLCJhdHRycyI6eyJpZCI6ImtyUmZnb3BzVXkxVHdOVTF5aFNETHFORFRkNk41dE1ackhsbThuTEpJREkiLCJub2RlSWQiOiJmMzgxYmFkOC1kMTNkLTQyOGItODYyNC03YTFkNTBhODBkMjMifX0seyJ0eXBlIjoicGgtcHJvcGVydGllcyIsImF0dHJzIjp7ImlkIjoia3JSZmdvcHNVeTFUd05VMXloU0RMcU5EVGQ2TjV0TVpySGxtOG5MSklESSIsIm5vZGVJZCI6ImE5Y2ZlOTlhLTRiZTktNDU1YS04OWMyLTViZGUyMGY2MWQwMiJ9fV19fV19", + "$initial_pathname": "/person/krRfgopsUy1TwNU1yhSDLqNDTd6N5tMZrHlm8nLJIDI", + "$initial_browser_version": 117, + "$initial_referrer": "$direct", + "$initial_referring_domain": "$direct" + }, + "$sent_at": "2023-10-20T11:05:50.975000+00:00", + "$geoip_city_name": "Sydney", + "$geoip_country_name": "Australia", + "$geoip_country_code": "AU", + "$geoip_continent_name": "Oceania", + "$geoip_continent_code": "OC", + "$geoip_postal_code": "2000", + "$geoip_latitude": -33.8715, + "$geoip_longitude": 151.2006, + "$geoip_time_zone": "Australia/Sydney", + "$geoip_subdivision_1_code": "NSW", + "$geoip_subdivision_1_name": "New South Wales", + "$plugins_succeeded": ["GeoIP (1)"], + "$plugins_failed": [], + "$plugins_deferred": [], + "$group_3": "018a92f8-b602-0000-75de-4b9073693531", + "$group_2": "018a92f8-afff-0000-efec-ca77de39e384", + "$group_4": "cus_OdSgxxWtHOtq9g", + "$group_1": "http://localhost:8000" + }, + "timestamp": 
"2023-10-20T11:05:50.993000+00:00", + "uuid": null + } + ], + "recording_duration_s": 2114.0, + "sessionId": "018b4ca3-1d07-7195-8202-c8a103a9325a" + } + ], + "timings": [ + { + "k": "./build_ast", + "t": 0.023964583000633866 + }, + { + "k": "./query", + "t": 4.433299181982875e-5 + }, + { + "k": "./replace_placeholders", + "t": 0.00022916699526831508 + }, + { + "k": "./max_limit", + "t": 4.520895890891552e-5 + }, + { + "k": "./hogql/prepare_ast/clone", + "t": 0.0003998330212198198 + }, + { + "k": "./hogql/prepare_ast/create_hogql_database", + "t": 0.026486458024010062 + }, + { + "k": "./hogql/prepare_ast/resolve_types", + "t": 0.0021655409946106374 + }, + { + "k": "./hogql/prepare_ast", + "t": 0.02919254096923396 + }, + { + "k": "./hogql/print_ast/printer", + "t": 0.001116375089623034 + }, + { + "k": "./hogql/print_ast", + "t": 0.0012727080029435456 + }, + { + "k": "./hogql", + "t": 0.03451150003820658 + }, + { + "k": "./print_ast/create_hogql_database", + "t": 0.020703041984234005 + }, + { + "k": "./print_ast/resolve_types", + "t": 0.0019762919982895255 + }, + { + "k": "./print_ast/resolve_property_types", + "t": 0.0009936250280588865 + }, + { + "k": "./print_ast/resolve_lazy_tables", + "t": 0.001264624996110797 + }, + { + "k": "./print_ast/printer", + "t": 0.0024609589600004256 + }, + { + "k": "./print_ast", + "t": 0.027551917009986937 + }, + { + "k": "./clickhouse_execute", + "t": 0.65883087500697 + }, + { + "k": ".", + "t": 0.7931497080135159 + } + ] +} diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/notebookNodePersonFeedLogic.ts b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/notebookNodePersonFeedLogic.ts new file mode 100644 index 0000000000000..1768b80caf50e --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePersonFeed/notebookNodePersonFeedLogic.ts @@ -0,0 +1,45 @@ +import { kea, key, path, props, afterMount } from 'kea' +import { loaders } from 'kea-loaders' + +// import { query } from 
'~/queries/query' +// import { +// // NodeKind, +// // SessionsTimelineQuery, +// SessionsTimelineQueryResponse, +// } from '~/queries/schema' + +import mockSessionsTimelineQueryResponse from './mockSessionsTimelineQueryResponse.json' + +import type { notebookNodePersonFeedLogicType } from './notebookNodePersonFeedLogicType' + +export type NotebookNodePersonFeedLogicProps = { + personId: string +} + +export const notebookNodePersonFeedLogic = kea([ + props({} as NotebookNodePersonFeedLogicProps), + path((key) => ['scenes', 'notebooks', 'Notebook', 'Nodes', 'notebookNodePersonFeedLogic', key]), + key(({ personId }) => personId), + + loaders(() => ({ + sessions: [ + // null as SessionsTimelineQueryResponse['results'] | null, + null as any | null, + { + loadSessionsTimeline: async () => { + // const result = await query({ + // kind: NodeKind.SessionsTimelineQuery, + // after: '2021-01-01T18:00:00Z', + // before: '2024-01-01T06:00:00Z', + // personId: props.personId, + // }) + const result = mockSessionsTimelineQueryResponse + return result.results + }, + }, + ], + })), + afterMount(({ actions }) => { + actions.loadSessionsTimeline() + }), +]) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx index 06f018a156c3d..20509743f3ca9 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodePlaylist.tsx @@ -6,7 +6,7 @@ import { getDefaultFilters, sessionRecordingsPlaylistLogic, } from 'scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic' -import { useActions, useValues } from 'kea' +import { BuiltLogic, useActions, useValues } from 'kea' import { useEffect, useMemo, useState } from 'react' import { fromParamsGivenUrl } from 'lib/utils' import { urls } from 'scenes/urls' @@ -17,6 +17,7 @@ import { ErrorBoundary } from '@sentry/react' import { SessionRecordingsPlaylist } from 
'scenes/session-recordings/playlist/SessionRecordingsPlaylist' import { sessionRecordingPlayerLogic } from 'scenes/session-recordings/player/sessionRecordingPlayerLogic' import { IconComment } from 'lib/lemon-ui/icons' +import { sessionRecordingPlayerLogicType } from 'scenes/session-recordings/player/sessionRecordingPlayerLogicType' const Component = ({ attributes, @@ -55,6 +56,11 @@ const Component = ({ const { activeSessionRecording } = useValues(logic) const { setSelectedRecordingId } = useActions(logic) + const getReplayLogic = ( + sessionRecordingId?: string + ): BuiltLogic | null | undefined => + sessionRecordingId ? sessionRecordingPlayerLogic.findMounted({ playerKey, sessionRecordingId }) : null + useEffect(() => { setActions( activeSessionRecording @@ -62,10 +68,15 @@ const Component = ({ { text: 'View replay', onClick: () => { + getReplayLogic(activeSessionRecording.id)?.actions.setPause() + insertAfter({ type: NotebookNodeType.Recording, attrs: { id: String(activeSessionRecording.id), + __init: { + expanded: true, + }, }, }) }, @@ -93,7 +104,7 @@ const Component = ({ setTimeout(() => { // NOTE: This is a hack but we need a delay to give time for the player to mount - sessionRecordingPlayerLogic.findMounted({ playerKey, sessionRecordingId })?.actions.seekToTime(time) + getReplayLogic(sessionRecordingId)?.actions.seekToTime(time) }, 100) }, }) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeProperties.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeProperties.tsx new file mode 100644 index 0000000000000..e6e0b63f70713 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeProperties.tsx @@ -0,0 +1,66 @@ +import { NotebookNodeType, PropertyDefinitionType } from '~/types' +import { createPostHogWidgetNode } from 'scenes/notebooks/Nodes/NodeWrapper' +import { LemonLabel, LemonSkeleton } from '@posthog/lemon-ui' +import { PropertyKeyInfo } from 'lib/components/PropertyKeyInfo' +import { PropertiesTable } from 
'lib/components/PropertiesTable' +import { useValues } from 'kea' +import { personLogic } from 'scenes/persons/personLogic' +import { NotebookNodeProps } from '../Notebook/utils' +import { NotFound } from 'lib/components/NotFound' +import { notebookNodeLogic } from './notebookNodeLogic' + +const Component = ({ attributes }: NotebookNodeProps): JSX.Element | null => { + const { id } = attributes + + const { expanded } = useValues(notebookNodeLogic) + + const logic = personLogic({ id }) + const { person, personLoading } = useValues(logic) + + if (personLoading) { + return + } else if (!person) { + return + } + + const numProperties = Object.keys(person.properties).length + + if (!expanded) { + return null + } + + return ( +
+ {Object.entries(person.properties).map(([key, value], index) => { + const isLast = index === numProperties - 1 + + return ( +
+ + + +
+ +
+
+ ) + })} +
+ ) +} + +type NotebookNodePropertiesAttributes = { + id: string +} + +export const NotebookNodeProperties = createPostHogWidgetNode({ + nodeType: NotebookNodeType.Properties, + titlePlaceholder: 'Properties', + Component, + resizeable: false, + expandable: true, + startExpanded: true, + attributes: { + id: {}, + }, +}) diff --git a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx index 245a52e07dfd5..a104932395c83 100644 --- a/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx +++ b/frontend/src/scenes/notebooks/Nodes/NotebookNodeRecording.tsx @@ -25,7 +25,7 @@ import { IconComment, IconPerson } from 'lib/lemon-ui/icons' import { NotFound } from 'lib/components/NotFound' const HEIGHT = 500 -const MIN_HEIGHT = 400 +const MIN_HEIGHT = '20rem' const Component = ({ attributes }: NotebookNodeProps): JSX.Element => { const { id, noInspector } = attributes diff --git a/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeEmptyState.tsx b/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeEmptyState.tsx new file mode 100644 index 0000000000000..8dd8c292407f3 --- /dev/null +++ b/frontend/src/scenes/notebooks/Nodes/components/NotebookNodeEmptyState.tsx @@ -0,0 +1,11 @@ +type NotebookNodeEmptyStateProps = { + message: string +} + +export function NotebookNodeEmptyState({ message }: NotebookNodeEmptyStateProps): JSX.Element { + return ( +
+ {message} +
+ ) +} diff --git a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts index 27ee45c70b083..44853bd6777bf 100644 --- a/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts +++ b/frontend/src/scenes/notebooks/Nodes/notebookNodeLogic.ts @@ -19,38 +19,30 @@ import { CustomNotebookNodeAttributes, JSONContent, Node, - NotebookNode, NotebookNodeAction, NotebookNodeAttributeProperties, NotebookNodeAttributes, NotebookNodeSettings, } from '../Notebook/utils' -import { NotebookNodeType } from '~/types' +import { NotebookNodeResource, NotebookNodeType } from '~/types' import posthog from 'posthog-js' import { NotebookNodeMessages, NotebookNodeMessagesListeners } from './messaging/notebook-node-messages' export type NotebookNodeLogicProps = { - node: NotebookNode - nodeId: string nodeType: NotebookNodeType notebookLogic: BuiltLogic - getPos: () => number - resizeable: boolean | ((attributes: CustomNotebookNodeAttributes) => boolean) - settings: NotebookNodeSettings + getPos?: () => number + resizeable?: boolean | ((attributes: CustomNotebookNodeAttributes) => boolean) + settings?: NotebookNodeSettings messageListeners?: NotebookNodeMessagesListeners - startExpanded: boolean + startExpanded?: boolean titlePlaceholder: string } & NotebookNodeAttributeProperties -const computeResizeable = ( - resizeable: NotebookNodeLogicProps['resizeable'], - attrs: NotebookNodeLogicProps['attributes'] -): boolean => (typeof resizeable === 'function' ? 
resizeable(attrs) : resizeable) - export const notebookNodeLogic = kea([ props({} as NotebookNodeLogicProps), path((key) => ['scenes', 'notebooks', 'Notebook', 'Nodes', 'notebookNodeLogic', key]), - key(({ nodeId }) => nodeId || 'no-node-id-set'), + key(({ attributes }) => attributes.nodeId || 'no-node-id-set'), actions({ setExpanded: (expanded: boolean) => ({ expanded }), setResizeable: (resizeable: boolean) => ({ resizeable }), @@ -62,6 +54,7 @@ export const notebookNodeLogic = kea([ timestamp, sessionRecordingId, }), + insertOrSelectNextLine: true, setPreviousNode: (node: Node | null) => ({ node }), setNextNode: (node: Node | null) => ({ node }), deleteNode: true, @@ -74,13 +67,13 @@ export const notebookNodeLogic = kea([ }), connect((props: NotebookNodeLogicProps) => ({ - actions: [props.notebookLogic, ['onUpdateEditor']], - values: [props.notebookLogic, ['editor']], + actions: [props.notebookLogic, ['onUpdateEditor', 'setTextSelection']], + values: [props.notebookLogic, ['editor', 'isEditable']], })), reducers(({ props }) => ({ expanded: [ - props.startExpanded, + props.startExpanded ?? true, { setExpanded: (_, { expanded }) => expanded, }, @@ -127,11 +120,15 @@ export const notebookNodeLogic = kea([ selectors({ notebookLogic: [(_, p) => [p.notebookLogic], (notebookLogic) => notebookLogic], nodeAttributes: [(_, p) => [p.attributes], (nodeAttributes) => nodeAttributes], - settings: [(_, p) => [p.settings], (settings) => settings], + nodeId: [(_, p) => [p.attributes], (nodeAttributes): string => nodeAttributes.nodeId], + settings: [() => [(_, props) => props], (props): NotebookNodeSettings | null => props.settings ?? 
null], + title: [ (s) => [s.titlePlaceholder, s.nodeAttributes], (titlePlaceholder, nodeAttributes) => nodeAttributes.title || titlePlaceholder, ], + // TODO: Fix the typing of nodeAttributes + children: [(s) => [s.nodeAttributes], (nodeAttributes): NotebookNodeResource[] => nodeAttributes.children], sendMessage: [ (s) => [s.messageListeners], @@ -153,29 +150,42 @@ export const notebookNodeLogic = kea([ listeners(({ actions, values, props }) => ({ onUpdateEditor: async () => { + if (!props.getPos) { + return + } const editor = values.notebookLogic.values.editor if (editor) { - const pos = props.getPos() - const { previous, next } = editor.getAdjacentNodes(pos) + const { previous, next } = editor.getAdjacentNodes(props.getPos()) actions.setPreviousNode(previous) actions.setNextNode(next) } }, insertAfter: ({ content }) => { + if (!props.getPos) { + return + } const logic = values.notebookLogic logic.values.editor?.insertContentAfterNode(props.getPos(), content) }, deleteNode: () => { + if (!props.getPos) { + // TODO: somehow make this delete from the parent + return + } + const logic = values.notebookLogic - logic.values.editor?.deleteRange({ from: props.getPos(), to: props.getPos() + props.node.nodeSize }).run() - if (values.notebookLogic.values.editingNodeId === props.nodeId) { + logic.values.editor?.deleteRange({ from: props.getPos(), to: props.getPos() + 1 }).run() + if (values.notebookLogic.values.editingNodeId === values.nodeId) { values.notebookLogic.actions.setEditingNodeId(null) } }, selectNode: () => { + if (!props.getPos) { + return + } const editor = values.notebookLogic.values.editor if (editor) { @@ -185,23 +195,45 @@ export const notebookNodeLogic = kea([ }, scrollIntoView: () => { + if (!props.getPos) { + return + } values.editor?.scrollToPosition(props.getPos()) }, insertAfterLastNodeOfType: ({ nodeType, content }) => { + if (!props.getPos) { + return + } const insertionPosition = props.getPos() 
values.notebookLogic.actions.insertAfterLastNodeOfType(nodeType, content, insertionPosition) }, insertReplayCommentByTimestamp: ({ timestamp, sessionRecordingId }) => { + if (!props.getPos) { + return + } const insertionPosition = props.getPos() values.notebookLogic.actions.insertReplayCommentByTimestamp({ timestamp, sessionRecordingId, knownStartingPosition: insertionPosition, - nodeId: props.nodeId, + nodeId: values.nodeId, }) }, + insertOrSelectNextLine: () => { + if (!props.getPos || !values.isEditable) { + return + } + + if (!values.nextNode || !values.nextNode.isTextblock) { + actions.insertAfter({ + type: 'paragraph', + }) + } else { + actions.setTextSelection(props.getPos() + 1) + } + }, setExpanded: ({ expanded }) => { if (expanded) { @@ -217,7 +249,7 @@ export const notebookNodeLogic = kea([ }, toggleEditing: () => { props.notebookLogic.actions.setEditingNodeId( - props.notebookLogic.values.editingNodeId === props.nodeId ? null : props.nodeId + props.notebookLogic.values.editingNodeId === values.nodeId ? null : values.nodeId ) }, initializeNode: () => { @@ -236,14 +268,19 @@ export const notebookNodeLogic = kea([ })), afterMount(async (logic) => { - logic.props.notebookLogic.actions.registerNodeLogic(logic as any) - const resizeable = computeResizeable(logic.props.resizeable, logic.props.attributes) - logic.actions.setResizeable(resizeable) - logic.actions.initializeNode() + const { props, actions, values } = logic + props.notebookLogic.actions.registerNodeLogic(values.nodeId, logic as any) + + const isResizeable = + typeof props.resizeable === 'function' ? props.resizeable(props.attributes) : props.resizeable ?? true + + actions.setResizeable(isResizeable) + actions.initializeNode() }), - beforeUnmount((logic) => { - logic.props.notebookLogic.actions.unregisterNodeLogic(logic as any) + beforeUnmount(({ props, values }) => { + // Note this doesn't work as there may be other places where this is used. 
The NodeWrapper should be in charge of somehow unmounting this + props.notebookLogic.actions.unregisterNodeLogic(values.nodeId) }), ]) diff --git a/frontend/src/scenes/notebooks/Notebook/Editor.tsx b/frontend/src/scenes/notebooks/Notebook/Editor.tsx index 39c6c29115958..feaffd333ae78 100644 --- a/frontend/src/scenes/notebooks/Notebook/Editor.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Editor.tsx @@ -8,6 +8,8 @@ import { FloatingMenu } from '@tiptap/extension-floating-menu' import StarterKit from '@tiptap/starter-kit' import ExtensionPlaceholder from '@tiptap/extension-placeholder' import ExtensionDocument from '@tiptap/extension-document' +import TaskItem from '@tiptap/extension-task-item' +import TaskList from '@tiptap/extension-task-list' import { NotebookNodeFlagCodeExample } from '../Nodes/NotebookNodeFlagCodeExample' import { NotebookNodeFlag } from '../Nodes/NotebookNodeFlag' @@ -36,6 +38,9 @@ import { notebookLogic } from './notebookLogic' import { sampleOne } from 'lib/utils' import { NotebookNodeGroup } from '../Nodes/NotebookNodeGroup' import { NotebookNodeCohort } from '../Nodes/NotebookNodeCohort' +import { NotebookNodePersonFeed } from '../Nodes/NotebookNodePersonFeed/NotebookNodePersonFeed' +import { NotebookNodeProperties } from '../Nodes/NotebookNodeProperties' +import { NotebookNodeMap } from '../Nodes/NotebookNodeMap' const CustomDocument = ExtensionDocument.extend({ content: 'heading block*', @@ -46,7 +51,7 @@ const PLACEHOLDER_TITLES = ['Release notes', 'Product roadmap', 'Meeting notes', export function Editor(): JSX.Element { const editorRef = useRef() - const { shortId } = useValues(notebookLogic) + const { shortId, mode } = useValues(notebookLogic) const { setEditor, onEditorUpdate, onEditorSelectionUpdate } = useActions(notebookLogic) const { resetSuggestions, setPreviousNode } = useActions(insertionSuggestionsLogic) @@ -62,7 +67,7 @@ export function Editor(): JSX.Element { const _editor = useEditor({ extensions: [ - CustomDocument, + 
mode === 'notebook' ? CustomDocument : ExtensionDocument, StarterKit.configure({ document: false, gapcursor: false, @@ -94,6 +99,10 @@ export function Editor(): JSX.Element { } }, }), + TaskList, + TaskItem.configure({ + nested: true, + }), NotebookMarkLink, NotebookNodeBacklink, NotebookNodeQuery, @@ -109,9 +118,12 @@ export function Editor(): JSX.Element { NotebookNodeEarlyAccessFeature, NotebookNodeSurvey, NotebookNodeImage, + NotebookNodeProperties, SlashCommandsExtension, BacklinkCommandsExtension, NodeGapInsertionExtension, + NotebookNodePersonFeed, + NotebookNodeMap, ], editorProps: { handleDrop: (view, event, _slice, moved) => { diff --git a/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx b/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx index 275b4e3febde0..8df8854a0843e 100644 --- a/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx +++ b/frontend/src/scenes/notebooks/Notebook/InlineMenu.tsx @@ -3,10 +3,12 @@ import { Editor, isTextSelection } from '@tiptap/core' import { BubbleMenu } from '@tiptap/react' import { IconBold, IconDelete, IconItalic, IconLink, IconOpenInNew } from 'lib/lemon-ui/icons' import { isURL } from 'lib/utils' +import { useRef } from 'react' import NotebookIconHeading from './NotebookIconHeading' export const InlineMenu = ({ editor }: { editor: Editor }): JSX.Element => { const { href, target } = editor.getAttributes('link') + const menuRef = useRef(null) const setLink = (href: string): void => { editor.commands.setMark('link', { href: href }) @@ -19,25 +21,29 @@ export const InlineMenu = ({ editor }: { editor: Editor }): JSX.Element => { return ( { - const hasEditorFocus = view.hasFocus() + shouldShow={({ editor: { isEditable }, view, state, from, to }) => { + const isChildOfMenu = menuRef.current?.contains(document.activeElement) + const focused = view.hasFocus() || isChildOfMenu const isTextBlock = isTextSelection(state.selection) - if (!hasEditorFocus || !editor.isEditable || !isTextBlock) { + if (!focused || 
!isEditable || !isTextBlock) { return false } return state.doc.textBetween(from, to).length > 0 }} > -
+
{editor.isActive('link') ? ( <> diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.scss b/frontend/src/scenes/notebooks/Notebook/Notebook.scss index eba73b22bf1a3..3adb654c29b47 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.scss +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.scss @@ -2,6 +2,7 @@ flex: 1; display: flex; flex-direction: column; + width: 100%; .NotebookEditor { flex: 1; @@ -36,12 +37,40 @@ height: 0; } - > ul { + > ol { + list-style-type: decimal; + } + + ul { list-style-type: disc; } - > ol { - list-style-type: decimal; + > ul[data-type='taskList'] { + list-style-type: none; + padding-left: 0px; + + li { + display: flex; + + > label { + flex: 0 0 auto; + margin-right: 0.5rem; + user-select: none; + } + + > div { + flex: 1 1 auto; + } + + ul li, + ol li { + display: list-item; + } + + ul[data-type='taskList'] > li { + display: flex; + } + } } > ul, @@ -123,47 +152,76 @@ } } - .NotebookSidebar { + .NotebookColumn { position: relative; width: 0px; transition: width var(--notebook-popover-transition-properties); --notebook-sidebar-height: calc(100vh - 9rem); - .NotebookScene & { - --notebook-sidebar-height: calc(100vh - 11rem); - } - - .NotebookSidebar__content { + .NotebookColumn__content { position: sticky; align-self: flex-start; top: 0px; - width: var(--notebook-sidebar-width); + transform: translateX(-100%); transition: transform var(--notebook-popover-transition-properties); + } + + .NotebookScene &, + .NotebookPopover & { + --notebook-sidebar-height: calc(100vh - 11rem); + + .NotebookColumn__padding { + // Account for fixed title + height: 4rem; + } + } - .NotebookScene & { - // Account for sticky header + .NotebookScene & { + .NotebookColumn__content { + // Account for static header top: 4rem; } } + &--left { + .NotebookColumn__content { + width: var(--notebook-column-left-width); + transform: translateX(-100%); + } + } + + &--right { + .NotebookColumn__content { + width: var(--notebook-column-right-width); + 
transform: translateX(100%); + } + } + &--showing { - width: var(--notebook-sidebar-width); - margin-right: 1rem; + &.NotebookColumn--left { + width: var(--notebook-column-left-width); + margin-right: 1rem; + } + + &.NotebookColumn--right { + width: var(--notebook-column-right-width); + margin-left: 1rem; + } - .NotebookSidebar__content { + .NotebookColumn__content { transform: translateX(0); } } + } - .NotebookHistory { - flex: 1; - display: flex; - flex-direction: column; - height: var(--notebook-sidebar-height); - overflow: hidden; - } + .NotebookHistory { + flex: 1; + display: flex; + flex-direction: column; + height: var(--notebook-sidebar-height); + overflow: hidden; } .NotebookInlineMenu { @@ -176,7 +234,7 @@ } } - .NotebookSidebar__widget { + .NotebookColumnLeft__widget { > .LemonWidget__content { max-height: calc(100vh - 220px); overflow: auto; diff --git a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx index 43146b75b270f..f320808bf90c9 100644 --- a/frontend/src/scenes/notebooks/Notebook/Notebook.tsx +++ b/frontend/src/scenes/notebooks/Notebook/Notebook.tsx @@ -1,5 +1,5 @@ import { useEffect } from 'react' -import { notebookLogic } from 'scenes/notebooks/Notebook/notebookLogic' +import { NotebookLogicProps, notebookLogic } from 'scenes/notebooks/Notebook/notebookLogic' import { BindLogic, useActions, useValues } from 'kea' import './Notebook.scss' @@ -11,24 +11,39 @@ import { SCRATCHPAD_NOTEBOOK } from '~/models/notebooksModel' import { NotebookConflictWarning } from './NotebookConflictWarning' import { NotebookLoadingState } from './NotebookLoadingState' import { Editor } from './Editor' -import { EditorFocusPosition } from './utils' -import { NotebookSidebar } from './NotebookSidebar' +import { EditorFocusPosition, JSONContent } from './utils' +import { NotebookColumnLeft } from './NotebookColumnLeft' import { ErrorBoundary } from '~/layout/ErrorBoundary' import { NotebookHistoryWarning } from 
'./NotebookHistory' import { useWhyDidIRender } from 'lib/hooks/useWhyDidIRender' +import { NotebookColumnRight } from './NotebookColumnRight' -export type NotebookProps = { - shortId: string - editable?: boolean +export type NotebookProps = NotebookLogicProps & { initialAutofocus?: EditorFocusPosition + initialContent?: JSONContent + editable?: boolean } -export function Notebook({ shortId, editable = false, initialAutofocus = 'start' }: NotebookProps): JSX.Element { - const logic = notebookLogic({ shortId }) - const { notebook, notebookLoading, editor, conflictWarningVisible, isEditable } = useValues(logic) - const { duplicateNotebook, loadNotebook, setEditable } = useActions(logic) +export function Notebook({ + shortId, + mode, + editable = true, + initialAutofocus = 'start', + initialContent, +}: NotebookProps): JSX.Element { + const logicProps: NotebookLogicProps = { shortId, mode } + const logic = notebookLogic(logicProps) + const { notebook, notebookLoading, editor, conflictWarningVisible, isEditable, isTemplate, notebookMissing } = + useValues(logic) + const { duplicateNotebook, loadNotebook, setEditable, setLocalContent } = useActions(logic) const { isExpanded } = useValues(notebookSettingsLogic) + useEffect(() => { + if (initialContent && mode === 'canvas') { + setLocalContent(initialContent) + } + }, [notebook]) + useWhyDidIRender('Notebook', { notebook, notebookLoading, @@ -36,7 +51,6 @@ export function Notebook({ shortId, editable = false, initialAutofocus = 'start' conflictWarningVisible, isEditable, shortId, - editable, initialAutofocus, }) @@ -66,14 +80,21 @@ export function Notebook({ shortId, editable = false, initialAutofocus = 'start' return } else if (!notebook && notebookLoading) { return - } else if (!notebook) { + } else if (notebookMissing) { return } return ( - -
- {notebook.is_template && ( + +
+ {isTemplate && ( - + +
diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookColumnLeft.tsx similarity index 70% rename from frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx rename to frontend/src/scenes/notebooks/Notebook/NotebookColumnLeft.tsx index d321be5d743c1..d3edc6569eb6b 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookSidebar.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookColumnLeft.tsx @@ -7,19 +7,20 @@ import { LemonButton } from '@posthog/lemon-ui' import { IconEyeVisible } from 'lib/lemon-ui/icons' import { NotebookHistory } from './NotebookHistory' -export const NotebookSidebar = (): JSX.Element | null => { - const { editingNodeLogic, isShowingSidebar, showHistory } = useValues(notebookLogic) +export const NotebookColumnLeft = (): JSX.Element | null => { + const { editingNodeLogic, isShowingLeftColumn, showHistory } = useValues(notebookLogic) return (
-
- {isShowingSidebar ? ( +
+
+ {isShowingLeftColumn ? ( editingNodeLogic ? ( - + ) : showHistory ? ( ) : null @@ -29,7 +30,7 @@ export const NotebookSidebar = (): JSX.Element | null => { ) } -const Widgets = ({ logic }: { logic: BuiltLogic }): JSX.Element => { +const NodeSettings = ({ logic }: { logic: BuiltLogic }): JSX.Element => { const { setEditingNodeId } = useActions(notebookLogic) const { settings: Settings, nodeAttributes, title } = useValues(logic) const { updateAttributes, selectNode } = useActions(logic) @@ -37,7 +38,7 @@ const Widgets = ({ logic }: { logic: BuiltLogic }): JSX.E return ( } size="small" status="primary" onClick={() => selectNode()} /> diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookColumnRight.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookColumnRight.tsx new file mode 100644 index 0000000000000..6ccb797affb76 --- /dev/null +++ b/frontend/src/scenes/notebooks/Notebook/NotebookColumnRight.tsx @@ -0,0 +1,50 @@ +import { BuiltLogic, useValues } from 'kea' +import clsx from 'clsx' +import { notebookLogic } from './notebookLogic' +import { notebookNodeLogicType } from '../Nodes/notebookNodeLogicType' +import { NotebookNodeChildRenderer } from '../Nodes/NodeWrapper' +import { uuid } from 'lib/utils' + +export const NotebookColumnRight = (): JSX.Element | null => { + const { isShowingLeftColumn, nodeLogicsWithChildren } = useValues(notebookLogic) + const isShowing = nodeLogicsWithChildren.length && !isShowingLeftColumn + + return ( +
+
+
+ {isShowing ? ( + <> + {nodeLogicsWithChildren.map((x, i) => ( + + ))} + + ) : null} +
+
+ ) +} + +const Widgets = ({ nodeLogic }: { nodeLogic: BuiltLogic }): JSX.Element => { + const { children } = useValues(nodeLogic) + + // TODO: IMPORTANT: The nodeId is basically now required, so we should be checking that in the logic + // otherwise we end up in horrible re-rendering loops + children.forEach((content) => { + if (!content.attrs.nodeId) { + content.attrs.nodeId = uuid() + } + }) + + return ( + <> + {children?.map((child) => ( + + ))} + + ) +} diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss index db98f2d008427..b191950decce5 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss +++ b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.scss @@ -80,7 +80,7 @@ &--with-sidebar { // TODO: Sync this with the sidebar width itself .NotebookPopover__content { - width: calc(50rem + var(--notebook-sidebar-width)); + width: calc(50rem + var(--notebook-column-left-width)); } } diff --git a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx index 65679bd924778..f820f876cce36 100644 --- a/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx +++ b/frontend/src/scenes/notebooks/Notebook/NotebookPopover.tsx @@ -111,7 +111,7 @@ export function NotebookPopoverCard(): JSX.Element | null { export function NotebookPopover(): JSX.Element { const { visibility, fullScreen, selectedNotebook, dropProperties } = useValues(notebookPopoverLogic) const { setVisibility, setFullScreen, setElementRef } = useActions(notebookPopoverLogic) - const { isShowingSidebar } = useValues(notebookLogic({ shortId: selectedNotebook })) + const { isShowingLeftColumn } = useValues(notebookLogic({ shortId: selectedNotebook })) const ref = useRef(null) @@ -145,7 +145,7 @@ export function NotebookPopover(): JSX.Element { 'NotebookPopover', `NotebookPopover--${visibility}`, fullScreen && 'NotebookPopover--full-screen', - 
isShowingSidebar && 'NotebookPopover--with-sidebar' + isShowingLeftColumn && 'NotebookPopover--with-sidebar' )} >
boolean, fn: () => any): Promise { @@ -59,7 +63,7 @@ async function runWhenEditorIsReady(waitForEditor: () => boolean, fn: () => any) export const notebookLogic = kea([ props({} as NotebookLogicProps), path((key) => ['scenes', 'notebooks', 'Notebook', 'notebookLogic', key]), - key(({ shortId }) => shortId), + key(({ shortId, mode }) => `${shortId}-${mode}`), connect(() => ({ values: [notebooksModel, ['scratchpadNotebook', 'notebookTemplates']], actions: [notebooksModel, ['receiveNotebookUpdate']], @@ -79,8 +83,8 @@ export const notebookLogic = kea([ exportJSON: true, showConflictWarning: true, onUpdateEditor: true, - registerNodeLogic: (nodeLogic: BuiltLogic) => ({ nodeLogic }), - unregisterNodeLogic: (nodeLogic: BuiltLogic) => ({ nodeLogic }), + registerNodeLogic: (nodeId: string, nodeLogic: BuiltLogic) => ({ nodeId, nodeLogic }), + unregisterNodeLogic: (nodeId: string) => ({ nodeId }), setEditable: (editable: boolean) => ({ editable }), scrollToSelection: true, pasteAfterLastNode: (content: string) => ({ @@ -104,10 +108,10 @@ export const notebookLogic = kea([ setShowHistory: (showHistory: boolean) => ({ showHistory }), setTextSelection: (selection: number | EditorRange) => ({ selection }), }), - reducers({ + reducers(({ props }) => ({ localContent: [ null as JSONContent | null, - { persist: true, prefix: NOTEBOOKS_VERSION }, + { persist: props.mode === 'notebook', prefix: NOTEBOOKS_VERSION }, { setLocalContent: (_, { jsonContent }) => jsonContent, clearLocalContent: () => null, @@ -148,20 +152,20 @@ export const notebookLogic = kea([ nodeLogics: [ {} as Record>, { - registerNodeLogic: (state, { nodeLogic }) => { - if (nodeLogic.props.nodeId === null) { + registerNodeLogic: (state, { nodeId, nodeLogic }) => { + if (nodeId === null) { return state } else { return { ...state, - [nodeLogic.props.nodeId]: nodeLogic, + [nodeId]: nodeLogic, } } }, - unregisterNodeLogic: (state, { nodeLogic }) => { + unregisterNodeLogic: (state, { nodeId }) => { const newState = { 
...state } - if (nodeLogic.props.nodeId !== null) { - delete newState[nodeLogic.props.nodeId] + if (nodeId !== null) { + delete newState[nodeId] } return newState }, @@ -179,13 +183,17 @@ export const notebookLogic = kea([ setShowHistory: (_, { showHistory }) => showHistory, }, ], - }), + })), loaders(({ values, props, actions }) => ({ notebook: [ null as NotebookType | null, { loadNotebook: async () => { - let response: NotebookType | null + let response: NotebookType | null = null + + if (values.mode !== 'notebook') { + return null + } if (props.shortId === SCRATCHPAD_NOTEBOOK.short_id) { response = { @@ -250,25 +258,30 @@ export const notebookLogic = kea([ null as NotebookType | null, { duplicateNotebook: async () => { - if (!values.notebook) { + if (!values.content) { return null } // We use the local content if set otherwise the notebook content. That way it supports templates, scratchpad etc. const response = await api.notebooks.create({ - content: values.content || values.notebook.content, + content: values.content, text_content: values.editor?.getText() || '', - title: values.title || values.notebook.title, + title: values.title, }) posthog.capture(`notebook duplicated`, { short_id: response.short_id, }) - const source = values.notebook.short_id === 'scratchpad' ? 'Scratchpad' : 'Template' + const source = + values.mode === 'canvas' + ? 'Canvas' + : values.notebook?.short_id === 'scratchpad' + ? 
'Scratchpad' + : 'Template' lemonToast.success(`Notebook created from ${source}!`) - if (values.notebook.short_id === 'scratchpad') { + if (values.notebook?.short_id === 'scratchpad') { // If duplicating the scratchpad, we assume they don't want the scratchpad content anymore actions.clearLocalContent() } @@ -282,7 +295,20 @@ export const notebookLogic = kea([ })), selectors({ shortId: [() => [(_, props) => props], (props): string => props.shortId], - isLocalOnly: [() => [(_, props) => props], (props): boolean => props.shortId === 'scratchpad'], + mode: [() => [(_, props) => props], (props): NotebookLogicMode => props.mode ?? 'notebook'], + isTemplate: [(s) => [s.shortId], (shortId): boolean => shortId.startsWith('template-')], + isLocalOnly: [ + () => [(_, props) => props], + (props): boolean => { + return props.shortId === 'scratchpad' || props.mode === 'canvas' + }, + ], + notebookMissing: [ + (s) => [s.notebook, s.notebookLoading, s.mode], + (notebook, notebookLoading, mode): boolean => { + return (['notebook', 'template'].includes(mode) && !notebook && !notebookLoading) ?? false + }, + ], content: [ (s) => [s.notebook, s.localContent, s.previewContent], (notebook, localContent, previewContent): JSONContent => { @@ -321,7 +347,7 @@ export const notebookLogic = kea([ editingNodeLogic: [ (s) => [s.editingNodeId, s.nodeLogics], (editingNodeId, nodeLogics) => - Object.values(nodeLogics).find((nodeLogic) => nodeLogic.props.nodeId === editingNodeId), + Object.values(nodeLogics).find((nodeLogic) => nodeLogic.values.nodeId === editingNodeId), ], findNodeLogic: [ (s) => [s.nodeLogics], @@ -344,13 +370,22 @@ export const notebookLogic = kea([ findNodeLogicById: [ (s) => [s.nodeLogics], (nodeLogics) => { - return (id: string): notebookNodeLogicType | null => { - return Object.values(nodeLogics).find((nodeLogic) => nodeLogic.props.nodeId === id) ?? null + return (id: string) => { + return Object.values(nodeLogics).find((nodeLogic) => nodeLogic.values.nodeId === id) ?? 
null } }, ], - isShowingSidebar: [ + nodeLogicsWithChildren: [ + (s) => [s.nodeLogics, s.content], + // eslint-disable-next-line @typescript-eslint/no-unused-vars + (nodeLogics, _content) => { + // NOTE: _content is not but is needed to retrigger as it could mean the children have changed + return Object.values(nodeLogics).filter((nodeLogic) => nodeLogic.props.attributes?.children) + }, + ], + + isShowingLeftColumn: [ (s) => [s.editingNodeId, s.showHistory], (editingNodeId, showHistory) => !!editingNodeId || showHistory, ], @@ -368,7 +403,7 @@ export const notebookLogic = kea([ } }, })), - listeners(({ values, actions, sharedListeners }) => ({ + listeners(({ values, actions, sharedListeners, cache }) => ({ insertAfterLastNode: async ({ content }) => { await runWhenEditorIsReady( () => !!values.editor, @@ -449,6 +484,19 @@ export const notebookLogic = kea([ await breakpoint(SYNC_DELAY) + if (values.mode === 'canvas') { + // TODO: We probably want this to be configurable + cache.lastState = btoa(JSON.stringify(jsonContent)) + router.actions.replace( + router.values.currentLocation.pathname, + router.values.currentLocation.searchParams, + { + ...router.values.currentLocation.hashParams, + state: cache.lastState, + } + ) + } + posthog.capture('notebook content changed', { short_id: values.notebook?.short_id, }) @@ -474,7 +522,7 @@ export const notebookLogic = kea([ }, onEditorUpdate: () => { - if (!values.editor || !values.notebook) { + if (!values.editor) { return } const jsonContent = values.editor.getJSON() @@ -519,4 +567,16 @@ export const notebookLogic = kea([ }) }, })), + + urlToAction(({ values, actions, cache }) => ({ + '*': (_, _search, hashParams) => { + if (values.mode === 'canvas' && hashParams?.state) { + if (cache.lastState === hashParams.state) { + return + } + + actions.setLocalContent(JSON.parse(atob(hashParams.state))) + } + }, + })), ]) diff --git a/frontend/src/scenes/notebooks/Notebook/utils.ts b/frontend/src/scenes/notebooks/Notebook/utils.ts 
index 2d0427b2a2ca4..edd9242b948e8 100644 --- a/frontend/src/scenes/notebooks/Notebook/utils.ts +++ b/frontend/src/scenes/notebooks/Notebook/utils.ts @@ -10,7 +10,7 @@ import { TextSerializer, } from '@tiptap/core' import { Node as PMNode } from '@tiptap/pm/model' -import { NotebookNodeType } from '~/types' +import { NotebookNodeResource, NotebookNodeType } from '~/types' export interface Node extends PMNode {} export interface JSONContent extends TTJSONContent {} @@ -31,6 +31,8 @@ export type NotebookNodeAttributes = T & expanded?: boolean showSettings?: boolean } + // TODO: Type this more specifically to be our supported nodes only + children?: NotebookNodeResource[] } // NOTE: Pushes users to use the parsed "attributes" instead @@ -122,6 +124,9 @@ export const textContent = (node: any): string => { 'ph-survey': customOrTitleSerializer, 'ph-group': customOrTitleSerializer, 'ph-cohort': customOrTitleSerializer, + 'ph-person-feed': customOrTitleSerializer, + 'ph-properties': customOrTitleSerializer, + 'ph-map': customOrTitleSerializer, } return getText(node, { diff --git a/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx new file mode 100644 index 0000000000000..894d00344a1b8 --- /dev/null +++ b/frontend/src/scenes/notebooks/NotebookCanvasScene.tsx @@ -0,0 +1,54 @@ +import { SceneExport } from 'scenes/sceneTypes' +import { NotebookLogicProps, notebookLogic } from './Notebook/notebookLogic' +import { Notebook } from './Notebook/Notebook' +import './NotebookScene.scss' +import { useMemo } from 'react' +import { uuid } from 'lib/utils' +import { useActions } from 'kea' +import { LemonBanner } from '@posthog/lemon-ui' +import { useFeatureFlag } from 'lib/hooks/useFeatureFlag' +import { NotFound } from 'lib/components/NotFound' + +export const scene: SceneExport = { + component: NotebookCanvas, +} + +export function NotebookCanvas(): JSX.Element { + const id = useMemo(() => uuid(), []) + + const logicProps: 
NotebookLogicProps = { + shortId: `canvas-${id}`, + mode: 'canvas', + } + + const { duplicateNotebook } = useActions(notebookLogic(logicProps)) + + const is3000 = useFeatureFlag('POSTHOG_3000') + + if (!is3000) { + return Canvas mode requires PostHog 3000} /> + } + + // TODO: The absolute positioning doesn't work so well in non-3000 mode + + return ( +
+ + This is a canvas. You can change anything you like and it is persisted to the URL for easy + sharing. + +
+
+ +
+
+
+ ) +} diff --git a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx index 1baff1b2871f6..a68a60d9b5bdc 100644 --- a/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx +++ b/frontend/src/scenes/notebooks/NotebookSelectButton/NotebookSelectButton.stories.tsx @@ -1,6 +1,6 @@ import { Meta, StoryFn } from '@storybook/react' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' -import { useFeatureFlags, useStorybookMocks } from '~/mocks/browser' +import { setFeatureFlags, useStorybookMocks } from '~/mocks/browser' import { NotebookNodeType } from '~/types' import { FEATURE_FLAGS } from 'lib/constants' @@ -37,7 +37,7 @@ const allNotebooks = [ ] const Template: StoryFn = (props) => { - useFeatureFlags([FEATURE_FLAGS.NOTEBOOKS]) + setFeatureFlags([FEATURE_FLAGS.NOTEBOOKS]) useStorybookMocks({ get: { '/api/projects/:team_id/notebooks/': (req, res, ctx) => { diff --git a/frontend/src/scenes/notebooks/NotebookSidebarPlaceholder.tsx b/frontend/src/scenes/notebooks/NotebookSidebarPlaceholder.tsx deleted file mode 100644 index d726210f17cc4..0000000000000 --- a/frontend/src/scenes/notebooks/NotebookSidebarPlaceholder.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import { LemonButton } from '@posthog/lemon-ui' -import { useActions } from 'kea' -import { notebookPopoverLogic } from './Notebook/notebookPopoverLogic' -import { IconArrowRight } from 'lib/lemon-ui/icons' - -export function NotebookSidebarPlaceholder(): JSX.Element { - const { setVisibility } = useActions(notebookPopoverLogic) - - return ( -
-

- This Notebook is open in the sidebar -

- -

- You can navigate around PostHog and drag and drop thing into it. Or you can close the sidebar and - it will be full screen here instead. -

- - setVisibility('hidden')}> - Open it here instead - -
- ) -} diff --git a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx index dc0c082733816..258e2cc92d552 100644 --- a/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx +++ b/frontend/src/scenes/notebooks/NotebooksTable/ContainsTypeFilter.tsx @@ -2,7 +2,10 @@ import { NotebookNodeType } from '~/types' import { LemonSelectMultiple } from 'lib/lemon-ui/LemonSelectMultiple' import { NotebooksListFilters } from 'scenes/notebooks/NotebooksTable/notebooksTableLogic' -export const fromNodeTypeToLabel: Omit, NotebookNodeType.Backlink> = { +export const fromNodeTypeToLabel: Omit< + Record, + NotebookNodeType.Backlink | NotebookNodeType.PersonFeed | NotebookNodeType.Properties | NotebookNodeType.Map +> = { [NotebookNodeType.FeatureFlag]: 'Feature flags', [NotebookNodeType.FeatureFlagCodeExample]: 'Feature flag Code Examples', [NotebookNodeType.Experiment]: 'Experiments', diff --git a/frontend/src/scenes/persons/PersonDisplay.tsx b/frontend/src/scenes/persons/PersonDisplay.tsx index d93766c080205..06b3e4b2158ad 100644 --- a/frontend/src/scenes/persons/PersonDisplay.tsx +++ b/frontend/src/scenes/persons/PersonDisplay.tsx @@ -7,6 +7,8 @@ import { PersonPreview } from './PersonPreview' import { useMemo, useState } from 'react' import { router } from 'kea-router' import { asDisplay, asLink } from './person-utils' +import { useNotebookNode } from 'scenes/notebooks/Nodes/notebookNodeLogic' +import { NotebookNodeType } from '~/types' type PersonPropType = | { properties?: Record; distinct_ids?: string[]; distinct_id?: never } @@ -15,6 +17,7 @@ type PersonPropType = export interface PersonDisplayProps { person?: PersonPropType | null withIcon?: boolean | ProfilePictureProps['size'] + href?: string noLink?: boolean noEllipsis?: boolean noPopover?: boolean @@ -45,11 +48,13 @@ export function PersonDisplay({ noPopover, noLink, isCentered, + href = asLink(person), }: 
PersonDisplayProps): JSX.Element { - const href = asLink(person) const display = asDisplay(person) const [visible, setVisible] = useState(false) + const notebookNode = useNotebookNode() + let content = ( {withIcon && } @@ -67,6 +72,19 @@ export function PersonDisplay({ router.actions.push(href) } else { setVisible(true) + + if (notebookNode && person) { + notebookNode.actions.updateAttributes({ + children: [ + { + type: NotebookNodeType.Person, + attrs: { + id: person.distinct_id || person.distinct_ids?.[0], + }, + }, + ], + }) + } } } : undefined @@ -91,25 +109,26 @@ export function PersonDisplay({ ) - content = noPopover ? ( - content - ) : ( - setVisible(false)} - /> - } - visible={visible} - onClickOutside={() => setVisible(false)} - placement="right" - fallbackPlacements={['bottom', 'top']} - showArrow - > - {content} - - ) + content = + noPopover || notebookNode ? ( + content + ) : ( + setVisible(false)} + /> + } + visible={visible} + onClickOutside={() => setVisible(false)} + placement="right" + fallbackPlacements={['bottom', 'top']} + showArrow + > + {content} + + ) return content } diff --git a/frontend/src/scenes/persons/PersonFeedCanvas.tsx b/frontend/src/scenes/persons/PersonFeedCanvas.tsx new file mode 100644 index 0000000000000..f8fef880bca53 --- /dev/null +++ b/frontend/src/scenes/persons/PersonFeedCanvas.tsx @@ -0,0 +1,52 @@ +import { PersonType } from '~/types' +import { Notebook } from 'scenes/notebooks/Notebook/Notebook' +import { uuid } from 'lib/utils' + +type PersonFeedCanvasProps = { + person: PersonType +} + +const PersonFeedCanvas = ({ person }: PersonFeedCanvasProps): JSX.Element => { + const id = person.id + + const personId = person.distinct_ids[0] + + return ( + + ) +} + +export default PersonFeedCanvas diff --git a/frontend/src/scenes/persons/PersonScene.tsx b/frontend/src/scenes/persons/PersonScene.tsx index 2a7a277deda10..bce2a2b72d130 100644 --- a/frontend/src/scenes/persons/PersonScene.tsx +++ 
b/frontend/src/scenes/persons/PersonScene.tsx @@ -34,6 +34,7 @@ import { LemonTabs } from 'lib/lemon-ui/LemonTabs' import { PersonDashboard } from './PersonDashboard' import { NotebookSelectButton } from 'scenes/notebooks/NotebookSelectButton/NotebookSelectButton' import { SessionRecordingsPlaylist } from 'scenes/session-recordings/playlist/SessionRecordingsPlaylist' +import PersonFeedCanvas from './PersonFeedCanvas' export const scene: SceneExport = { component: PersonScene, @@ -109,6 +110,7 @@ function PersonCaption({ person }: { person: PersonType }): JSX.Element { export function PersonScene(): JSX.Element | null { const { showCustomerSuccessDashboards, + feedEnabled, person, personLoading, currentTab, @@ -185,6 +187,13 @@ export function PersonScene(): JSX.Element | null { }} data-attr="persons-tabs" tabs={[ + feedEnabled + ? { + key: PersonsTabType.FEED, + label: Feed, + content: , + } + : false, { key: PersonsTabType.PROPERTIES, label: Properties, diff --git a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx index 08a3d1e23dc6f..ffef288d633f5 100644 --- a/frontend/src/scenes/persons/RelatedFeatureFlags.tsx +++ b/frontend/src/scenes/persons/RelatedFeatureFlags.tsx @@ -117,6 +117,16 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element }, }, ] + + const options = [ + { label: 'All types', value: 'all' }, + { + label: FeatureFlagReleaseType.ReleaseToggle, + value: FeatureFlagReleaseType.ReleaseToggle, + }, + { label: FeatureFlagReleaseType.Variants, value: FeatureFlagReleaseType.Variants }, + ] + return ( <>
@@ -131,14 +141,7 @@ export function RelatedFeatureFlags({ distinctId, groups }: Props): JSX.Element Type { if (type) { if (type === 'all') { diff --git a/frontend/src/scenes/persons/personsLogic.tsx b/frontend/src/scenes/persons/personsLogic.tsx index 005924ec56924..eb1c15fd49a11 100644 --- a/frontend/src/scenes/persons/personsLogic.tsx +++ b/frontend/src/scenes/persons/personsLogic.tsx @@ -136,11 +136,10 @@ export const personsLogic = kea({ : 'https://posthog.com/docs/api/persons', ], cohortId: [() => [(_, props) => props.cohort], (cohort: PersonsLogicProps['cohort']) => cohort], - currentTab: [ - (s) => [s.activeTab], - (activeTab) => { - return activeTab || PersonsTabType.PROPERTIES - }, + currentTab: [(s) => [s.activeTab, s.defaultTab], (activeTab, defaultTab) => activeTab || defaultTab], + defaultTab: [ + (s) => [s.feedEnabled], + (feedEnabled) => (feedEnabled ? PersonsTabType.FEED : PersonsTabType.PROPERTIES), ], breadcrumbs: [ (s) => [s.person, router.selectors.location], @@ -179,6 +178,7 @@ export const personsLogic = kea({ (s) => [s.featureFlags], (featureFlags) => featureFlags[FEATURE_FLAGS.CS_DASHBOARDS], ], + feedEnabled: [(s) => [s.featureFlags], (featureFlags) => !!featureFlags[FEATURE_FLAGS.PERSON_FEED_CANVAS]], }), listeners: ({ actions, values }) => ({ editProperty: async ({ key, newValue }) => { @@ -375,7 +375,7 @@ export const personsLogic = kea({ } if (!activeTab) { - actions.setActiveTab(PersonsTabType.PROPERTIES) + actions.setActiveTab(values.defaultTab) } if (rawPersonDistinctId) { @@ -397,7 +397,7 @@ export const personsLogic = kea({ } if (!activeTab) { - actions.setActiveTab(PersonsTabType.PROPERTIES) + actions.setActiveTab(values.defaultTab) } if (rawPersonUUID) { diff --git a/frontend/src/scenes/sceneTypes.ts b/frontend/src/scenes/sceneTypes.ts index 5922d6e36bd2a..0e35998cd8887 100644 --- a/frontend/src/scenes/sceneTypes.ts +++ b/frontend/src/scenes/sceneTypes.ts @@ -82,6 +82,7 @@ export enum Scene { Feedback = 'Feedback', Notebooks = 
'Notebooks', Notebook = 'Notebook', + Canvas = 'Canvas', Products = 'Products', Onboarding = 'Onboarding', } diff --git a/frontend/src/scenes/scenes.ts b/frontend/src/scenes/scenes.ts index 3f4964bdeffbf..d8f43290d737f 100644 --- a/frontend/src/scenes/scenes.ts +++ b/frontend/src/scenes/scenes.ts @@ -327,6 +327,11 @@ export const sceneConfigurations: Partial> = { projectBased: true, name: 'Notebooks', }, + [Scene.Canvas]: { + projectBased: true, + name: 'Canvas', + layout: 'app-raw', + }, } const preserveParams = (url: string) => (_params: Params, searchParams: Params, hashParams: Params) => { @@ -498,4 +503,5 @@ export const routes: Record = { [urls.feedback() + '/*']: Scene.Feedback, [urls.notebook(':shortId')]: Scene.Notebook, [urls.notebooks()]: Scene.Notebooks, + [urls.canvas()]: Scene.Canvas, } diff --git a/frontend/src/scenes/session-recordings/filters/SessionRecordingsFilters.tsx b/frontend/src/scenes/session-recordings/filters/SessionRecordingsFilters.tsx index 949326b351df8..bc1e108bf7c19 100644 --- a/frontend/src/scenes/session-recordings/filters/SessionRecordingsFilters.tsx +++ b/frontend/src/scenes/session-recordings/filters/SessionRecordingsFilters.tsx @@ -101,8 +101,9 @@ export function SessionRecordingsFilters({ size="small" onClick={() => setShowAdvancedFilters(!showAdvancedFilters)} disabledReason={ - hasAdvancedFilters && - 'You are only allowed person filters and a single pageview event to switch back to simple filters' + hasAdvancedFilters + ? 'You are only allowed person filters and a single pageview event (filtered by current url) to switch back to simple filters' + : undefined } data-attr={`session-recordings-show-${showAdvancedFilters ? 
'simple' : 'advanced'}-filters`} > diff --git a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx index 3097dbe5e7119..d95fc4bdaee0a 100644 --- a/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx +++ b/frontend/src/scenes/session-recordings/player/SessionRecordingPlayer.tsx @@ -125,7 +125,8 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. const { size } = useResizeBreakpoints( { - 0: 'small', + 0: 'tiny', + 400: 'small', 1000: 'medium', }, playerRef @@ -148,9 +149,9 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. className={clsx('SessionRecordingPlayer', { 'SessionRecordingPlayer--fullscreen': isFullScreen, 'SessionRecordingPlayer--no-border': noBorder, - 'SessionRecordingPlayer--widescreen': !isFullScreen && size !== 'small', + 'SessionRecordingPlayer--widescreen': !isFullScreen && size === 'medium', 'SessionRecordingPlayer--inspector-focus': inspectorFocus, - 'SessionRecordingPlayer--inspector-hidden': noInspector, + 'SessionRecordingPlayer--inspector-hidden': noInspector || size === 'tiny', 'SessionRecordingPlayer--buffering': isBuffering, })} onClick={incrementClickCount} @@ -160,7 +161,7 @@ export function SessionRecordingPlayer(props: SessionRecordingPlayerProps): JSX. ) : ( <>
- {!noMeta || isFullScreen ? : null} + {(!noMeta || isFullScreen) && size !== 'tiny' ? : null}
diff --git a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts index 2c588d43ccf0a..b07d55e3f9d10 100644 --- a/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts +++ b/frontend/src/scenes/session-recordings/playlist/sessionRecordingsPlaylistLogic.ts @@ -79,30 +79,64 @@ export const getDefaultFilters = (personUUID?: PersonUUID): RecordingFilters => return personUUID ? DEFAULT_PERSON_RECORDING_FILTERS : DEFAULT_RECORDING_FILTERS } +function isPageViewFilter(filter: Record): boolean { + return filter.name === '$pageview' +} +function isCurrentURLPageViewFilter(eventsFilter: Record): boolean { + const hasSingleProperty = Array.isArray(eventsFilter.properties) && eventsFilter.properties?.length === 1 + const isCurrentURLProperty = hasSingleProperty && eventsFilter.properties[0].key === '$current_url' + return isPageViewFilter(eventsFilter) && isCurrentURLProperty +} + +// checks are stored against filter keys so that the type system enforces adding a check when we add new filters +const advancedFilterChecks: Record< + keyof RecordingFilters, + (filters: RecordingFilters, defaultFilters: RecordingFilters) => boolean +> = { + actions: (filters) => (filters.actions ? filters.actions.length > 0 : false), + events: function (filters: RecordingFilters): boolean { + const eventsFilters = filters.events || [] + // simple filters allow a single $pageview event filter with $current_url as the selected property + // anything else is advanced + return ( + eventsFilters.length > 1 || + (!!eventsFilters[0] && + (!isPageViewFilter(eventsFilters[0]) || !isCurrentURLPageViewFilter(eventsFilters[0]))) + ) + }, + properties: function (): boolean { + // TODO is this right? should we ever care about properties for choosing between advanced and simple? 
+ return false + }, + date_from: (filters, defaultFilters) => filters.date_from != defaultFilters.date_from, + date_to: (filters, defaultFilters) => filters.date_to != defaultFilters.date_to, + session_recording_duration: (filters, defaultFilters) => + !equal(filters.session_recording_duration, defaultFilters.session_recording_duration), + duration_type_filter: (filters, defaultFilters) => + filters.duration_type_filter !== defaultFilters.duration_type_filter, + console_search_query: (filters) => + filters.console_search_query ? filters.console_search_query.trim().length > 0 : false, + console_logs: (filters) => (filters.console_logs ? filters.console_logs.length > 0 : false), + filter_test_accounts: (filters) => filters.filter_test_accounts ?? false, +} + export const addedAdvancedFilters = ( filters: RecordingFilters | undefined, defaultFilters: RecordingFilters ): boolean => { - if (!filters) { + // if there are no filters or if some filters are not present then the page is still booting up + if (!filters || filters.session_recording_duration === undefined || filters.date_from === undefined) { return false } - const hasActions = filters.actions ? filters.actions.length > 0 : false - const hasChangedDateFrom = filters.date_from != defaultFilters.date_from - const hasChangedDateTo = filters.date_to != defaultFilters.date_to - const hasConsoleLogsFilters = filters.console_logs ? 
filters.console_logs.length > 0 : false - const hasChangedDuration = !equal(filters.session_recording_duration, defaultFilters.session_recording_duration) - const eventsFilters = filters.events || [] - const hasAdvancedEvents = eventsFilters.length > 1 || (!!eventsFilters[0] && eventsFilters[0].name != '$pageview') - - return ( - hasActions || - hasAdvancedEvents || - hasChangedDuration || - hasChangedDateFrom || - hasChangedDateTo || - hasConsoleLogsFilters - ) + // keeps results with the keys for printing when debugging + const checkResults = Object.keys(advancedFilterChecks).map((key) => ({ + key, + result: advancedFilterChecks[key](filters, defaultFilters), + })) + + // if any check is true, then this is an advanced filter + return checkResults.some((checkResult) => checkResult.result) } export const defaultPageviewPropertyEntityFilter = ( @@ -348,8 +382,14 @@ export const sessionRecordingsPlaylistLogic = kea - addedAdvancedFilters(filters, getDefaultFilters(props.personUUID)) ? true : showingAdvancedFilters, + persist: true, + }, + { + setFilters: (showingAdvancedFilters, { filters }) => { + return addedAdvancedFilters(filters, getDefaultFilters(props.personUUID)) + ? true + : showingAdvancedFilters + }, setShowAdvancedFilters: (_, { showAdvancedFilters }) => showAdvancedFilters, }, ], diff --git a/frontend/src/scenes/surveys/SurveyAppearance.tsx b/frontend/src/scenes/surveys/SurveyAppearance.tsx index b964554d61360..5f90af5447c65 100644 --- a/frontend/src/scenes/surveys/SurveyAppearance.tsx +++ b/frontend/src/scenes/surveys/SurveyAppearance.tsx @@ -490,8 +490,10 @@ export function SurveyRatingAppearance({
)} -
{question}
- {description &&
{description}
} +
+ {description && ( +
+ )}
{ratingSurveyQuestion.display === 'emoji' && ( @@ -588,8 +590,10 @@ export function SurveyMultipleChoiceAppearance({
)} -
{question}
- {description &&
{description}
} +
+ {description && ( +
+ )}
{(multipleChoiceQuestion.choices || []).map((choice, idx) => (
diff --git a/frontend/src/scenes/trends/trendsDataLogic.ts b/frontend/src/scenes/trends/trendsDataLogic.ts index 06e95485b8e80..5ee67ec453f4e 100644 --- a/frontend/src/scenes/trends/trendsDataLogic.ts +++ b/frontend/src/scenes/trends/trendsDataLogic.ts @@ -143,7 +143,7 @@ export const trendsDataLogic = kea([ actions.setInsightData({ ...values.insightData, - result: [...values.insightData?.result, ...(response.result ? response.result : [])], + result: [...values.insightData.result, ...(response.result ? response.result : [])], next: response.next, }) diff --git a/frontend/src/scenes/urls.ts b/frontend/src/scenes/urls.ts index 40ac033a7576c..930aa0c14d6a1 100644 --- a/frontend/src/scenes/urls.ts +++ b/frontend/src/scenes/urls.ts @@ -186,4 +186,5 @@ export const urls = { issues: (): string => '/issues', notebooks: (): string => '/notebooks', notebook: (shortId: string): string => `/notebooks/${shortId}`, + canvas: (): string => `/canvas`, } diff --git a/frontend/src/scenes/web-analytics/WebDashboard.tsx b/frontend/src/scenes/web-analytics/WebDashboard.tsx index fb0c247e79e55..1d07f69b5d61e 100644 --- a/frontend/src/scenes/web-analytics/WebDashboard.tsx +++ b/frontend/src/scenes/web-analytics/WebDashboard.tsx @@ -9,6 +9,10 @@ import { QueryContext, QueryContextColumnComponent, QueryContextColumnTitleCompo import { useCallback } from 'react' import { UnexpectedNeverError } from 'lib/utils' import { DateFilter } from 'lib/components/DateFilter/DateFilter' +import { LemonBanner } from 'lib/lemon-ui/LemonBanner' +import { supportLogic } from 'lib/components/Support/supportLogic' +import { IconBugReport, IconFeedback, IconGithub } from 'lib/lemon-ui/icons' +import { preflightLogic } from 'scenes/PreflightCheck/preflightLogic' const PercentageCell: QueryContextColumnComponent = ({ value }) => { if (typeof value === 'number') { @@ -209,9 +213,38 @@ const Tiles = (): JSX.Element => { ) } +export const Notice = (): JSX.Element => { + const { openSupportForm } = 
useActions(supportLogic) + const { preflight } = useValues(preflightLogic) + + const showSupportOptions = preflight?.cloud + + return ( + +

PostHog Web Analytics is in closed Alpha. Thanks for taking part! We'd love to hear what you think.

+ {showSupportOptions ? ( +

+ openSupportForm('bug')}> + Report a bug + {' '} + -{' '} + openSupportForm('feedback')}> + Give feedback + {' '} + -{' '} + + View GitHub issue + +

+ ) : null} +
+ ) +} + export const WebAnalyticsDashboard = (): JSX.Element => { return ( -
+
+
diff --git a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts index 613b919f1483f..04c7bf786508b 100644 --- a/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts +++ b/frontend/src/scenes/web-analytics/webAnalyticsLogic.ts @@ -2,14 +2,7 @@ import { actions, connect, kea, listeners, path, reducers, selectors, sharedList import type { webAnalyticsLogicType } from './webAnalyticsLogicType' import { NodeKind, QuerySchema, WebAnalyticsPropertyFilters, WebStatsBreakdown } from '~/queries/schema' -import { - BaseMathType, - ChartDisplayType, - EventPropertyFilter, - HogQLPropertyFilter, - PropertyFilterType, - PropertyOperator, -} from '~/types' +import { EventPropertyFilter, HogQLPropertyFilter, PropertyFilterType, PropertyOperator } from '~/types' import { isNotNil } from 'lib/utils' interface Layout { @@ -363,63 +356,63 @@ export const webAnalyticsLogic = kea([ }, ], }, - { - title: 'Unique users', - layout: { - colSpan: 6, - }, - query: { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - dateRange, - interval: 'day', - series: [ - { - event: '$pageview', - kind: NodeKind.EventsNode, - math: BaseMathType.UniqueUsers, - name: '$pageview', - }, - ], - trendsFilter: { - compare: true, - display: ChartDisplayType.ActionsLineGraph, - }, - filterTestAccounts: true, - properties: webAnalyticsFilters, - }, - }, - }, - { - title: 'World Map (Unique Users)', - layout: { - colSpan: 6, - }, - query: { - kind: NodeKind.InsightVizNode, - source: { - kind: NodeKind.TrendsQuery, - breakdown: { - breakdown: '$geoip_country_code', - breakdown_type: 'person', - }, - dateRange, - series: [ - { - event: '$pageview', - kind: NodeKind.EventsNode, - math: BaseMathType.UniqueUsers, - }, - ], - trendsFilter: { - display: ChartDisplayType.WorldMap, - }, - filterTestAccounts: true, - properties: webAnalyticsFilters, - }, - }, - }, + // { + // title: 'Unique visitors', + // layout: { + // colSpan: 6, + // 
}, + // query: { + // kind: NodeKind.InsightVizNode, + // source: { + // kind: NodeKind.TrendsQuery, + // dateRange, + // interval: 'day', + // series: [ + // { + // event: '$pageview', + // kind: NodeKind.EventsNode, + // math: BaseMathType.UniqueUsers, + // name: '$pageview', + // }, + // ], + // trendsFilter: { + // compare: true, + // display: ChartDisplayType.ActionsLineGraph, + // }, + // filterTestAccounts: true, + // properties: webAnalyticsFilters, + // }, + // }, + // }, + // { + // title: 'World Map (Unique Users)', + // layout: { + // colSpan: 6, + // }, + // query: { + // kind: NodeKind.InsightVizNode, + // source: { + // kind: NodeKind.TrendsQuery, + // breakdown: { + // breakdown: '$geoip_country_code', + // breakdown_type: 'person', + // }, + // dateRange, + // series: [ + // { + // event: '$pageview', + // kind: NodeKind.EventsNode, + // math: BaseMathType.UniqueUsers, + // }, + // ], + // trendsFilter: { + // display: ChartDisplayType.WorldMap, + // }, + // filterTestAccounts: true, + // properties: webAnalyticsFilters, + // }, + // }, + // }, ] }, ], diff --git a/frontend/src/styles/vars.scss b/frontend/src/styles/vars.scss index 7f18952abf6f0..45adf1ab8d342 100644 --- a/frontend/src/styles/vars.scss +++ b/frontend/src/styles/vars.scss @@ -323,5 +323,6 @@ $_lifecycle_dormant: $_danger; // Notebooks --notebook-popover-transition-properties: 150ms cubic-bezier(0, 0.5, 0.5, 1); - --notebook-sidebar-width: 27rem; + --notebook-column-left-width: 27rem; + --notebook-column-right-width: 20rem; } diff --git a/frontend/src/types.ts b/frontend/src/types.ts index a8b702e55b296..c59ab2d0a9f50 100644 --- a/frontend/src/types.ts +++ b/frontend/src/types.ts @@ -935,6 +935,7 @@ export enum StepOrderValue { } export enum PersonsTabType { + FEED = 'feed', EVENTS = 'events', SESSION_RECORDINGS = 'sessionRecordings', PROPERTIES = 'properties', @@ -3080,6 +3081,9 @@ export enum NotebookNodeType { Backlink = 'ph-backlink', ReplayTimestamp = 'ph-replay-timestamp', 
Image = 'ph-image', + PersonFeed = 'ph-person-feed', + Properties = 'ph-properties', + Map = 'ph-map', } export type NotebookNodeResource = { diff --git a/gunicorn.config.py b/gunicorn.config.py index add45d44880d5..1e56182026068 100644 --- a/gunicorn.config.py +++ b/gunicorn.config.py @@ -140,15 +140,25 @@ def run(self): Every X seconds, check the status of the Thread pool, as well as the """ active_worker_connections = Gauge( - "gunicorn_active_worker_connections", "Number of active connections.", labelnames=["pid"] + "gunicorn_active_worker_connections", + "Number of active connections.", + labelnames=["pid"], ) max_worker_connections = Gauge( - "gunicorn_max_worker_connections", "Maximum worker connections.", labelnames=["pid"] + "gunicorn_max_worker_connections", + "Maximum worker connections.", + labelnames=["pid"], ) - total_threads = Gauge("gunicorn_max_worker_threads", "Size of the thread pool per worker.", labelnames=["pid"]) + total_threads = Gauge( + "gunicorn_max_worker_threads", + "Size of the thread pool per worker.", + labelnames=["pid"], + ) active_threads = Gauge( - "gunicorn_active_worker_threads", "Number of threads actively processing requests.", labelnames=["pid"] + "gunicorn_active_worker_threads", + "Number of threads actively processing requests.", + labelnames=["pid"], ) pending_requests = Gauge( @@ -170,6 +180,7 @@ def run(self): LOGGING_FORMATTER_NAME = os.getenv("LOGGING_FORMATTER_NAME", "default") + # Setup stdlib logging to be handled by Structlog def add_pid_and_tid( logger: logging.Logger, method_name: str, event_dict: structlog.types.EventDict diff --git a/hogvm/python/execute.py b/hogvm/python/execute.py index 15bbbace68a50..cce77cbc86ae7 100644 --- a/hogvm/python/execute.py +++ b/hogvm/python/execute.py @@ -57,9 +57,9 @@ def execute_bytecode(bytecode: List[Any], fields: Dict[str, Any]) -> Any: case Operation.NOT: stack.append(not stack.pop()) case Operation.AND: - stack.append(all([stack.pop() for _ in range(next(iterator))])) + 
stack.append(all([stack.pop() for _ in range(next(iterator))])) # noqa: C419 case Operation.OR: - stack.append(any([stack.pop() for _ in range(next(iterator))])) + stack.append(any([stack.pop() for _ in range(next(iterator))])) # noqa: C419 case Operation.PLUS: stack.append(stack.pop() + stack.pop()) case Operation.MINUS: diff --git a/hogvm/python/test/test_execute.py b/hogvm/python/test/test_execute.py index c99ed451f13a3..5b7838995d3ac 100644 --- a/hogvm/python/test/test_execute.py +++ b/hogvm/python/test/test_execute.py @@ -81,7 +81,11 @@ def test_bytecode_create(self): def test_nested_value(self): my_dict = { - "properties": {"bla": "hello", "list": ["item1", "item2", "item3"], "tuple": ("item1", "item2", "item3")} + "properties": { + "bla": "hello", + "list": ["item1", "item2", "item3"], + "tuple": ("item1", "item2", "item3"), + } } chain = ["properties", "bla"] self.assertEqual(get_nested_value(my_dict, chain), "hello") diff --git a/package.json b/package.json index 3ffa52cc67565..85796f23468b3 100644 --- a/package.json +++ b/package.json @@ -84,6 +84,8 @@ "@tiptap/extension-document": "^2.1.0-rc.12", "@tiptap/extension-floating-menu": "^2.1.0-rc.12", "@tiptap/extension-placeholder": "^2.1.0-rc.12", + "@tiptap/extension-task-item": "^2.1.11", + "@tiptap/extension-task-list": "^2.1.11", "@tiptap/pm": "^2.1.0-rc.12", "@tiptap/react": "^2.1.0-rc.12", "@tiptap/starter-kit": "^2.1.0-rc.12", @@ -128,10 +130,11 @@ "kea-test-utils": "^0.2.4", "kea-waitfor": "^0.2.1", "kea-window-values": "^3.0.0", + "maplibre-gl": "^3.5.1", "md5": "^2.3.0", "monaco-editor": "^0.39.0", "papaparse": "^5.4.1", - "posthog-js": "1.85.3", + "posthog-js": "1.86.0", "posthog-js-lite": "2.0.0-alpha5", "prettier": "^2.8.8", "prop-types": "^15.7.2", @@ -223,12 +226,11 @@ "@types/react-resizable": "^3.0.4", "@types/react-syntax-highlighter": "^15.5.7", "@types/react-textfit": "^1.1.0", - "@types/react-transition-group": "^4.4.4", "@types/react-virtualized": "^9.21.14", 
"@types/testing-library__jest-dom": "^5.14.5", "@types/zxcvbn": "^4.4.0", - "@typescript-eslint/eslint-plugin": "^6.4.0", - "@typescript-eslint/parser": "^6.4.0", + "@typescript-eslint/eslint-plugin": "^6.9.0", + "@typescript-eslint/parser": "^6.9.0", "autoprefixer": "^10.4.7", "axe-core": "^4.4.3", "babel-loader": "^8.0.6", @@ -239,15 +241,16 @@ "cypress": "^13.3.0", "cypress-axe": "^1.5.0", "cypress-terminal-report": "^5.3.7", - "eslint": "^7.8.0", - "eslint-config-prettier": "^8.8.0", + "eslint": "^8.52.0", + "eslint-config-prettier": "^9.0.0", "eslint-plugin-compat": "^4.2.0", - "eslint-plugin-cypress": "^2.13.3", + "eslint-plugin-cypress": "^2.15.1", "eslint-plugin-eslint-comments": "^3.2.0", - "eslint-plugin-jest": "^27.2.3", - "eslint-plugin-no-only-tests": "^3.0.0", - "eslint-plugin-prettier": "^3.1.4", - "eslint-plugin-react": "^7.32.2", + "eslint-plugin-jest": "^27.4.3", + "eslint-plugin-no-only-tests": "^3.1.0", + "eslint-plugin-posthog": "link:./eslint-rules", + "eslint-plugin-prettier": "^5.0.1", + "eslint-plugin-react": "^7.33.2", "eslint-plugin-storybook": "^0.6.15", "file-loader": "^6.1.0", "givens": "^1.3.6", diff --git a/plugin-server/bin/generate_session_recordings_messages.py b/plugin-server/bin/generate_session_recordings_messages.py index a685a66f888fb..4b5462bebd3a7 100755 --- a/plugin-server/bin/generate_session_recordings_messages.py +++ b/plugin-server/bin/generate_session_recordings_messages.py @@ -242,11 +242,15 @@ def generate_snapshot_messages( ) incremental_snapshot_count_samples = sample_log_normal_distribution( - incremental_snapshot_count_mean, incremental_snapshot_count_standard_deviation, count + incremental_snapshot_count_mean, + incremental_snapshot_count_standard_deviation, + count, ) full_snapshot_size_samples = sample_log_normal_distribution( - full_snapshot_size_mean, full_snapshot_size_standard_deviation, max(full_snapshot_count_samples) + full_snapshot_size_mean, + full_snapshot_size_standard_deviation, + 
max(full_snapshot_count_samples), ) incremental_snapshot_size_samples = sample_log_normal_distribution( diff --git a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts index 47db9a4ad93ac..b4293e2f538ca 100644 --- a/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts +++ b/plugin-server/src/main/ingestion-queues/batch-processing/each-batch-ingestion.ts @@ -313,7 +313,10 @@ export function splitIngestionBatch( } const pluginEvent = formPipelineEvent(message) const eventKey = computeKey(pluginEvent) - if (overflowMode === IngestionOverflowMode.Reroute && !ConfiguredLimiter.consume(eventKey, 1)) { + if ( + overflowMode === IngestionOverflowMode.Reroute && + !ConfiguredLimiter.consume(eventKey, 1, message.timestamp) + ) { // Local overflow detection triggering, reroute to overflow topic too message.key = null ingestionPartitionKeyOverflowed.labels(`${pluginEvent.team_id ?? pluginEvent.token}`).inc() diff --git a/plugin-server/src/utils/token-bucket.ts b/plugin-server/src/utils/token-bucket.ts index 30f9e1e846e7c..962383554c4a2 100644 --- a/plugin-server/src/utils/token-bucket.ts +++ b/plugin-server/src/utils/token-bucket.ts @@ -21,22 +21,20 @@ export class Storage { } replenish(key: string, now?: number): void { - if (typeof now === 'undefined') { - now = Date.now() - } - - if (this.buckets.has(key) === false) { - this.buckets.set(key, [this.bucketCapacity, now]) + const replenish_timestamp: number = now ?? Date.now() + const bucket = this.buckets.get(key) + if (bucket === undefined) { + this.buckets.set(key, [this.bucketCapacity, replenish_timestamp]) return } - // We have checked the key exists already, so this cannot be undefined - const bucket: Bucket = this.buckets.get(key)! 
- - // replenishRate is per second, but timestamps are in milliseconds - const replenishedTokens = this.replenishRate * ((now - bucket[1]) / 1000) + bucket[0] - bucket[0] = Math.min(replenishedTokens, this.bucketCapacity) - bucket[1] = now + // Replenish the bucket if replenish_timestamp is higher than lastReplenishedTimestamp + const secondsToReplenish = (replenish_timestamp - bucket[1]) / 1000 + if (secondsToReplenish > 0) { + bucket[0] += this.replenishRate * secondsToReplenish + bucket[0] = Math.min(bucket[0], this.bucketCapacity) + bucket[1] = replenish_timestamp + } } consume(key: string, tokens: number): boolean { diff --git a/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-consumer.test.ts b/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-consumer.test.ts index cbb3d06e6a29d..462677a9b46f7 100644 --- a/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-consumer.test.ts +++ b/plugin-server/tests/main/ingestion-queues/analytics-events-ingestion-consumer.test.ts @@ -4,6 +4,7 @@ import { IngestionOverflowMode, } from '../../../src/main/ingestion-queues/batch-processing/each-batch-ingestion' import { ConfiguredLimiter } from '../../../src/utils/token-bucket' +import { runEventPipeline } from './../../../src/worker/ingestion/event-pipeline/runner' import { captureIngestionWarning } from './../../../src/worker/ingestion/utils' jest.mock('../../../src/utils/status') @@ -12,7 +13,6 @@ jest.mock('./../../../src/worker/ingestion/utils') jest.mock('./../../../src/worker/ingestion/event-pipeline/runner', () => ({ runEventPipeline: jest.fn().mockResolvedValue('default value'), })) -import { runEventPipeline } from './../../../src/worker/ingestion/event-pipeline/runner' const captureEndpointEvent = { uuid: 'uuid1', @@ -94,14 +94,16 @@ describe('eachBatchParallelIngestion with overflow reroute', () => { }) it('reroutes excess events to OVERFLOW topic', async () => { - const batch = 
createBatchWithMultipleEventsWithKeys([captureEndpointEvent]) + const now = Date.now() + const batch = createBatchWithMultipleEventsWithKeys([captureEndpointEvent], now) const consume = jest.spyOn(ConfiguredLimiter, 'consume').mockImplementation(() => false) await eachBatchParallelIngestion(batch, queue, IngestionOverflowMode.Reroute) expect(consume).toHaveBeenCalledWith( captureEndpointEvent['team_id'] + ':' + captureEndpointEvent['distinct_id'], - 1 + 1, + now ) expect(captureIngestionWarning).not.toHaveBeenCalled() expect(queue.pluginsServer.kafkaProducer.produce).toHaveBeenCalledWith({ @@ -118,14 +120,16 @@ describe('eachBatchParallelIngestion with overflow reroute', () => { }) it('does not reroute if not over capacity limit', async () => { - const batch = createBatchWithMultipleEventsWithKeys([captureEndpointEvent]) + const now = Date.now() + const batch = createBatchWithMultipleEventsWithKeys([captureEndpointEvent], now) const consume = jest.spyOn(ConfiguredLimiter, 'consume').mockImplementation(() => true) await eachBatchParallelIngestion(batch, queue, IngestionOverflowMode.Reroute) expect(consume).toHaveBeenCalledWith( captureEndpointEvent['team_id'] + ':' + captureEndpointEvent['distinct_id'], - 1 + 1, + now ) expect(captureIngestionWarning).not.toHaveBeenCalled() expect(queue.pluginsServer.kafkaProducer.produce).not.toHaveBeenCalled() diff --git a/plugin-server/tests/utils/token-bucket.test.ts b/plugin-server/tests/utils/token-bucket.test.ts index 0b1fe853436a5..8b12ccdf29dcd 100644 --- a/plugin-server/tests/utils/token-bucket.test.ts +++ b/plugin-server/tests/utils/token-bucket.test.ts @@ -59,6 +59,10 @@ describe('Storage', () => { expect(storage.buckets.get(key)![0]).toEqual(10) expect(storage.buckets.get(key)![1]).toEqual(now.valueOf()) + // get two tokens to be replenished + storage.consume(key, 2) + expect(storage.buckets.get(key)![0]).toEqual(8) + // 20 seconds would exceed capacity of 10 tokens at 1 token/sec. 
storage.replenish(key, now.valueOf() + 20000) @@ -66,6 +70,27 @@ describe('Storage', () => { expect(storage.buckets.get(key)![0]).toEqual(10) expect(storage.buckets.get(key)![1]).toEqual(now.valueOf() + 20000) }) + + it('does not add if now is in the past', () => { + const key = 'test' + const storage = new Storage(10, 1) + const now = new Date('2023-02-08T08:00:00') + + storage.replenish(key, now.valueOf()) + expect(storage.buckets.get(key)![0]).toEqual(10) + expect(storage.buckets.get(key)![1]).toEqual(now.valueOf()) + + // get two tokens to be replenished + storage.consume(key, 2) + expect(storage.buckets.get(key)![0]).toEqual(8) + + // Will be a no-op due to a lower now value + storage.replenish(key, now.valueOf() - 20000) + + expect(storage.buckets.has(key)).toEqual(true) + expect(storage.buckets.get(key)![0]).toEqual(8) + expect(storage.buckets.get(key)![1]).toEqual(now.valueOf()) + }) }) describe('consume()', () => { diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 4877b89c3edb9..ba38929ba6fa4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,4 +1,4 @@ -lockfileVersion: '6.1' +lockfileVersion: '6.0' settings: autoInstallPeers: true @@ -65,6 +65,12 @@ dependencies: '@tiptap/extension-placeholder': specifier: ^2.1.0-rc.12 version: 2.1.0-rc.12(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12) + '@tiptap/extension-task-item': + specifier: ^2.1.11 + version: 2.1.11(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12) + '@tiptap/extension-task-list': + specifier: ^2.1.11 + version: 2.1.11(@tiptap/core@2.1.0-rc.12) '@tiptap/pm': specifier: ^2.1.0-rc.12 version: 2.1.0-rc.12 @@ -197,6 +203,9 @@ dependencies: kea-window-values: specifier: ^3.0.0 version: 3.0.0(kea@3.1.5) + maplibre-gl: + specifier: ^3.5.1 + version: 3.5.1 md5: specifier: ^2.3.0 version: 2.3.0 @@ -207,8 +216,8 @@ dependencies: specifier: ^5.4.1 version: 5.4.1 posthog-js: - specifier: 1.85.3 - version: 1.85.3 + specifier: 1.86.0 + version: 1.86.0 posthog-js-lite: specifier: 2.0.0-alpha5 version: 
2.0.0-alpha5 @@ -481,11 +490,11 @@ devDependencies: specifier: ^4.4.0 version: 4.4.1 '@typescript-eslint/eslint-plugin': - specifier: ^6.4.0 - version: 6.4.0(@typescript-eslint/parser@6.4.0)(eslint@7.32.0)(typescript@4.9.5) + specifier: ^6.9.0 + version: 6.9.0(@typescript-eslint/parser@6.9.0)(eslint@8.52.0)(typescript@4.9.5) '@typescript-eslint/parser': - specifier: ^6.4.0 - version: 6.4.0(eslint@7.32.0)(typescript@4.9.5) + specifier: ^6.9.0 + version: 6.9.0(eslint@8.52.0)(typescript@4.9.5) autoprefixer: specifier: ^10.4.7 version: 10.4.13(postcss@8.4.31) @@ -517,35 +526,38 @@ devDependencies: specifier: ^5.3.7 version: 5.3.7(cypress@13.3.0) eslint: - specifier: ^7.8.0 - version: 7.32.0 + specifier: ^8.52.0 + version: 8.52.0 eslint-config-prettier: - specifier: ^8.8.0 - version: 8.8.0(eslint@7.32.0) + specifier: ^9.0.0 + version: 9.0.0(eslint@8.52.0) eslint-plugin-compat: specifier: ^4.2.0 - version: 4.2.0(eslint@7.32.0) + version: 4.2.0(eslint@8.52.0) eslint-plugin-cypress: - specifier: ^2.13.3 - version: 2.13.3(eslint@7.32.0) + specifier: ^2.15.1 + version: 2.15.1(eslint@8.52.0) eslint-plugin-eslint-comments: specifier: ^3.2.0 - version: 3.2.0(eslint@7.32.0) + version: 3.2.0(eslint@8.52.0) eslint-plugin-jest: - specifier: ^27.2.3 - version: 27.2.3(@typescript-eslint/eslint-plugin@6.4.0)(eslint@7.32.0)(jest@29.3.1)(typescript@4.9.5) + specifier: ^27.4.3 + version: 27.4.3(@typescript-eslint/eslint-plugin@6.9.0)(eslint@8.52.0)(jest@29.3.1)(typescript@4.9.5) eslint-plugin-no-only-tests: - specifier: ^3.0.0 + specifier: ^3.1.0 version: 3.1.0 + eslint-plugin-posthog: + specifier: link:./eslint-rules + version: link:eslint-rules eslint-plugin-prettier: - specifier: ^3.1.4 - version: 3.4.1(eslint-config-prettier@8.8.0)(eslint@7.32.0)(prettier@2.8.8) + specifier: ^5.0.1 + version: 5.0.1(eslint-config-prettier@9.0.0)(eslint@8.52.0)(prettier@2.8.8) eslint-plugin-react: - specifier: ^7.32.2 - version: 7.32.2(eslint@7.32.0) + specifier: ^7.33.2 + version: 
7.33.2(eslint@8.52.0) eslint-plugin-storybook: specifier: ^0.6.15 - version: 0.6.15(eslint@7.32.0)(typescript@4.9.5) + version: 0.6.15(eslint@8.52.0)(typescript@4.9.5) file-loader: specifier: ^6.1.0 version: 6.2.0(webpack@5.88.2) @@ -657,6 +669,11 @@ devDependencies: packages: + /@aashutoshrathi/word-wrap@1.2.6: + resolution: {integrity: sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==} + engines: {node: '>=0.10.0'} + dev: true + /@adobe/css-tools@4.0.1: resolution: {integrity: sha512-+u76oB43nOHrF4DDWRLWDCtci7f3QJoEBigemIdIeTi1ODqjx6Tad9NCVnPRwewWlKkVab5PlK8DCtPTyX7S8g==} dev: true @@ -714,12 +731,6 @@ packages: default-browser-id: 3.0.0 dev: true - /@babel/code-frame@7.12.11: - resolution: {integrity: sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==} - dependencies: - '@babel/highlight': 7.22.10 - dev: true - /@babel/code-frame@7.22.10: resolution: {integrity: sha512-/KKIMG4UEL35WmI9OlvMhurwtytjvXoFcGNrOvyG9zIzA8YmPjVtIZUf7b05+TPO7G7/GEmLHDaoCgACHl9hhA==} engines: {node: '>=6.9.0'} @@ -2426,13 +2437,13 @@ packages: dev: true optional: true - /@eslint-community/eslint-utils@4.4.0(eslint@7.32.0): + /@eslint-community/eslint-utils@4.4.0(eslint@8.52.0): resolution: {integrity: sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 dependencies: - eslint: 7.32.0 + eslint: 8.52.0 eslint-visitor-keys: 3.4.3 dev: true @@ -2441,23 +2452,28 @@ packages: engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} dev: true - /@eslint/eslintrc@0.4.3: - resolution: {integrity: sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==} - engines: {node: ^10.12.0 || >=12.0.0} + /@eslint/eslintrc@2.1.2: + resolution: {integrity: 
sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: ajv: 6.12.6 debug: 4.3.4(supports-color@8.1.1) - espree: 7.3.1 - globals: 13.17.0 - ignore: 4.0.6 + espree: 9.6.1 + globals: 13.23.0 + ignore: 5.2.4 import-fresh: 3.3.0 - js-yaml: 3.14.1 + js-yaml: 4.1.0 minimatch: 3.1.2 strip-json-comments: 3.1.1 transitivePeerDependencies: - supports-color dev: true + /@eslint/js@8.52.0: + resolution: {integrity: sha512-mjZVbpaeMZludF2fsWLD0Z9gCref1Tk4i9+wddjRvpUNqqcndPkBD09N/Mapey0b3jaXbLm2kICwFv2E64QinA==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + dev: true + /@fal-works/esbuild-plugin-global-externals@2.1.2: resolution: {integrity: sha512-cEee/Z+I12mZcFJshKcCqC8tuX5hG3s+d+9nZ3LabqKF1vKdF41B92pJVCBggjAGORAeOzyyDDKrZwIkLffeOQ==} dev: true @@ -2535,19 +2551,24 @@ packages: scheduler: 0.19.1 dev: true - /@humanwhocodes/config-array@0.5.0: - resolution: {integrity: sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==} + /@humanwhocodes/config-array@0.11.13: + resolution: {integrity: sha512-JSBDMiDKSzQVngfRjOdFXgFfklaXI4K9nLF49Auh21lmBWRLIK3+xTErTWD4KU54pb6coM6ESE7Awz/FNU3zgQ==} engines: {node: '>=10.10.0'} dependencies: - '@humanwhocodes/object-schema': 1.2.1 + '@humanwhocodes/object-schema': 2.0.1 debug: 4.3.4(supports-color@8.1.1) minimatch: 3.1.2 transitivePeerDependencies: - supports-color dev: true - /@humanwhocodes/object-schema@1.2.1: - resolution: {integrity: sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==} + /@humanwhocodes/module-importer@1.0.1: + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + dev: true + + /@humanwhocodes/object-schema@2.0.1: + resolution: {integrity: 
sha512-dvuCeX5fC9dXgJn9t+X5atfmgQAzUOWqS1254Gh0m6i8wKd10ebXkfNKiRK+1GWi/yTvvLDHpoxLr0xxxeslWw==} dev: true /@isaacs/cliui@8.0.2: @@ -3085,6 +3106,54 @@ packages: react: 16.14.0 dev: false + /@mapbox/geojson-rewind@0.5.2: + resolution: {integrity: sha512-tJaT+RbYGJYStt7wI3cq4Nl4SXxG8W7JDG5DMJu97V25RnbNg3QtQtf+KD+VLjNpWKYsRvXDNmNrBgEETr1ifA==} + hasBin: true + dependencies: + get-stream: 6.0.1 + minimist: 1.2.8 + dev: false + + /@mapbox/jsonlint-lines-primitives@2.0.2: + resolution: {integrity: sha512-rY0o9A5ECsTQRVhv7tL/OyDpGAoUB4tTvLiW1DSzQGq4bvTPhNw1VpSNjDJc5GFZ2XuyOtSWSVN05qOtcD71qQ==} + engines: {node: '>= 0.6'} + dev: false + + /@mapbox/point-geometry@0.1.0: + resolution: {integrity: sha512-6j56HdLTwWGO0fJPlrZtdU/B13q8Uwmo18Ck2GnGgN9PCFyKTZ3UbXeEdRFh18i9XQ92eH2VdtpJHpBD3aripQ==} + dev: false + + /@mapbox/tiny-sdf@2.0.6: + resolution: {integrity: sha512-qMqa27TLw+ZQz5Jk+RcwZGH7BQf5G/TrutJhspsca/3SHwmgKQ1iq+d3Jxz5oysPVYTGP6aXxCo5Lk9Er6YBAA==} + dev: false + + /@mapbox/unitbezier@0.0.1: + resolution: {integrity: sha512-nMkuDXFv60aBr9soUG5q+GvZYL+2KZHVvsqFCzqnkGEf46U2fvmytHaEVc1/YZbiLn8X+eR3QzX1+dwDO1lxlw==} + dev: false + + /@mapbox/vector-tile@1.3.1: + resolution: {integrity: sha512-MCEddb8u44/xfQ3oD+Srl/tNcQoqTw3goGk2oLsrFxOTc3dUp+kAnby3PvAeeBYSMSjSPD1nd1AJA6W49WnoUw==} + dependencies: + '@mapbox/point-geometry': 0.1.0 + dev: false + + /@mapbox/whoots-js@3.1.0: + resolution: {integrity: sha512-Es6WcD0nO5l+2BOQS4uLfNPYQaNDfbot3X1XUoloz+x0mPDS3eeORZJl06HXjwBG1fOGwCRnzK88LMdxKRrd6Q==} + engines: {node: '>=6.0.0'} + dev: false + + /@maplibre/maplibre-gl-style-spec@19.3.3: + resolution: {integrity: sha512-cOZZOVhDSulgK0meTsTkmNXb1ahVvmTmWmfx9gRBwc6hq98wS9JP35ESIoNq3xqEan+UN+gn8187Z6E4NKhLsw==} + hasBin: true + dependencies: + '@mapbox/jsonlint-lines-primitives': 2.0.2 + '@mapbox/unitbezier': 0.0.1 + json-stringify-pretty-compact: 3.0.0 + minimist: 1.2.8 + rw: 1.3.3 + sort-object: 3.0.3 + dev: false + /@maxmind/geoip2-node@3.5.0: resolution: {integrity: 
sha512-WG2TNxMwDWDOrljLwyZf5bwiEYubaHuICvQRlgz74lE9OZA/z4o+ZT6OisjDBAZh/yRJVNK6mfHqmP5lLlAwsA==} dependencies: @@ -3215,6 +3284,18 @@ packages: dev: true optional: true + /@pkgr/utils@2.4.2: + resolution: {integrity: sha512-POgTXhjrTfbTV63DiFXav4lBHiICLKKwDeaKn9Nphwj7WH6m0hMMCaJkMyRWjgtPFyRKRVoMXXjczsTQRDEhYw==} + engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + dependencies: + cross-spawn: 7.0.3 + fast-glob: 3.3.1 + is-glob: 4.0.3 + open: 9.1.0 + picocolors: 1.0.0 + tslib: 2.6.2 + dev: true + /@playwright/test@1.29.2: resolution: {integrity: sha512-+3/GPwOgcoF0xLz/opTnahel1/y42PdcgZ4hs+BZGIUjtmEFSXGg+nFoaH3NSmuc7a6GSFwXDJ5L7VXpqzigNg==} engines: {node: '>=14'} @@ -5505,6 +5586,24 @@ packages: '@tiptap/core': 2.1.0-rc.12(@tiptap/pm@2.1.0-rc.12) dev: false + /@tiptap/extension-task-item@2.1.11(@tiptap/core@2.1.0-rc.12)(@tiptap/pm@2.1.0-rc.12): + resolution: {integrity: sha512-721inc/MAZkljPup/EWCpNho4nf+XrYVKWRixqgX+AjikusTJefylbiZ5OeRn+71osTA7SdnXiKkM2ZbHtAsYA==} + peerDependencies: + '@tiptap/core': ^2.0.0 + '@tiptap/pm': ^2.0.0 + dependencies: + '@tiptap/core': 2.1.0-rc.12(@tiptap/pm@2.1.0-rc.12) + '@tiptap/pm': 2.1.0-rc.12 + dev: false + + /@tiptap/extension-task-list@2.1.11(@tiptap/core@2.1.0-rc.12): + resolution: {integrity: sha512-9C1M9N3jbNjm4001mPkgwUH19b6ZvKj5nnRT3zib/gFIQLOnSHE3VErDPHP/lkkjH84LgOMrm69cm8chQpgNsA==} + peerDependencies: + '@tiptap/core': ^2.0.0 + dependencies: + '@tiptap/core': 2.1.0-rc.12(@tiptap/pm@2.1.0-rc.12) + dev: false + /@tiptap/extension-text@2.1.0-rc.12(@tiptap/core@2.1.0-rc.12): resolution: {integrity: sha512-6rJvPkpypaEW+jM6oB9kMXg+wy7xbDnGBFLRvCuA4Tr5Y+S+i34CzcihyARr90p+scYkOl+6QYVww4oisRFskA==} peerDependencies: @@ -5959,6 +6058,10 @@ packages: resolution: {integrity: sha512-Nmh0K3iWQJzniTuPRcJn5hxXkfB1T1pgB89SBig5PlJQU5yocazeu4jATJlaA0GYFKWMqDdvYemoSnF2pXgLVA==} dev: true + /@types/geojson@7946.0.12: + resolution: {integrity: sha512-uK2z1ZHJyC0nQRbuovXFt4mzXDwf27vQeUWNhfKGwRcWW429GOhP8HxUHlM6TLH4bzmlv/HlEjpvJh3JfmGsAA==} + 
dev: false + /@types/graceful-fs@4.1.5: resolution: {integrity: sha512-anKkLmZZ+xm4p8JWBf4hElkM4XR+EZeA2M9BAkkTldmcyDY4mbdIJnRghDJH3Ov5ooY7/UAoENtmdMSkaAd7Cw==} dependencies: @@ -6043,6 +6146,18 @@ packages: resolution: {integrity: sha512-zmEmF5OIM3rb7SbLCFYoQhO4dGt2FRM9AMkxvA3LaADOF1n8in/zGJlWji9fmafLoNyz+FoL6FE0SLtGIArD7w==} dev: true + /@types/mapbox__point-geometry@0.1.3: + resolution: {integrity: sha512-2W46IOXlu7vC8m3+M5rDqSnuY22GFxxx3xhkoyqyPWrD+eP2iAwNst0A1+umLYjCTJMJTSpiofphn9h9k+Kw+w==} + dev: false + + /@types/mapbox__vector-tile@1.3.3: + resolution: {integrity: sha512-d263B3KCQtXKVZMHpMJrEW5EeLBsQ8jvAS9nhpUKC5hHIlQaACG9PWkW8qxEeNuceo9120AwPjeS91uNa4ltqA==} + dependencies: + '@types/geojson': 7946.0.12 + '@types/mapbox__point-geometry': 0.1.3 + '@types/pbf': 3.0.4 + dev: false + /@types/md5@2.3.2: resolution: {integrity: sha512-v+JFDu96+UYJ3/UWzB0mEglIS//MZXgRaJ4ubUPwOM0gvLc/kcQ3TWNYwENEK7/EcXGQVrW8h/XqednSjBd/Og==} dev: false @@ -6110,6 +6225,10 @@ packages: resolution: {integrity: sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA==} dev: true + /@types/pbf@3.0.4: + resolution: {integrity: sha512-SOFlLGZkLbEXJRwcWCqeP/Koyaf/uAqLXHUsdo/nMfjLsNd8kqauwHe9GBOljSmpcHp/LC6kOjo3SidGjNirVA==} + dev: false + /@types/pica@9.0.1: resolution: {integrity: sha512-hTsYxcy0MqIOKzeALuh3zOHyozBlndxV/bX9X52GBFq2XUQchZF6T0vcRYeT5P1ggmswi2LlIwHAH+bKWxxalg==} dev: true @@ -6257,6 +6376,12 @@ packages: resolution: {integrity: sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==} dev: true + /@types/supercluster@7.1.2: + resolution: {integrity: sha512-qMhofL945Z4njQUuntadexAgPtpiBC014WvVqU70Prj42LC77Xgmz04us7hSMmwjs7KbgAwGBmje+FSOvDbP0Q==} + dependencies: + '@types/geojson': 7946.0.12 + dev: false + /@types/testing-library__jest-dom@5.14.5: resolution: {integrity: sha512-SBwbxYoyPIvxHbeHxTZX2Pe/74F/tX2/D3mMvzabdeJ25bBojfW0TyB8BHrbq/9zaaKICJZjLP+8r6AeZMFCuQ==} dependencies: @@ -6312,8 +6437,8 
@@ packages: resolution: {integrity: sha512-3NoqvZC2W5gAC5DZbTpCeJ251vGQmgcWIHQJGq2J240HY6ErQ9aWKkwfoKJlHLx+A83WPNTZ9+3cd2ILxbvr1w==} dev: true - /@typescript-eslint/eslint-plugin@6.4.0(@typescript-eslint/parser@6.4.0)(eslint@7.32.0)(typescript@4.9.5): - resolution: {integrity: sha512-62o2Hmc7Gs3p8SLfbXcipjWAa6qk2wZGChXG2JbBtYpwSRmti/9KHLqfbLs9uDigOexG+3PaQ9G2g3201FWLKg==} + /@typescript-eslint/eslint-plugin@6.9.0(@typescript-eslint/parser@6.9.0)(eslint@8.52.0)(typescript@4.9.5): + resolution: {integrity: sha512-lgX7F0azQwRPB7t7WAyeHWVfW1YJ9NIgd9mvGhfQpRY56X6AVf8mwM8Wol+0z4liE7XX3QOt8MN1rUKCfSjRIA==} engines: {node: ^16.0.0 || >=18.0.0} peerDependencies: '@typescript-eslint/parser': ^6.0.0 || ^6.0.0-alpha @@ -6324,13 +6449,13 @@ packages: optional: true dependencies: '@eslint-community/regexpp': 4.6.2 - '@typescript-eslint/parser': 6.4.0(eslint@7.32.0)(typescript@4.9.5) - '@typescript-eslint/scope-manager': 6.4.0 - '@typescript-eslint/type-utils': 6.4.0(eslint@7.32.0)(typescript@4.9.5) - '@typescript-eslint/utils': 6.4.0(eslint@7.32.0)(typescript@4.9.5) - '@typescript-eslint/visitor-keys': 6.4.0 + '@typescript-eslint/parser': 6.9.0(eslint@8.52.0)(typescript@4.9.5) + '@typescript-eslint/scope-manager': 6.9.0 + '@typescript-eslint/type-utils': 6.9.0(eslint@8.52.0)(typescript@4.9.5) + '@typescript-eslint/utils': 6.9.0(eslint@8.52.0)(typescript@4.9.5) + '@typescript-eslint/visitor-keys': 6.9.0 debug: 4.3.4(supports-color@8.1.1) - eslint: 7.32.0 + eslint: 8.52.0 graphemer: 1.4.0 ignore: 5.2.4 natural-compare: 1.4.0 @@ -6341,8 +6466,8 @@ packages: - supports-color dev: true - /@typescript-eslint/parser@6.4.0(eslint@7.32.0)(typescript@4.9.5): - resolution: {integrity: sha512-I1Ah1irl033uxjxO9Xql7+biL3YD7w9IU8zF+xlzD/YxY6a4b7DYA08PXUUCbm2sEljwJF6ERFy2kTGAGcNilg==} + /@typescript-eslint/parser@6.9.0(eslint@8.52.0)(typescript@4.9.5): + resolution: {integrity: sha512-GZmjMh4AJ/5gaH4XF2eXA8tMnHWP+Pm1mjQR2QN4Iz+j/zO04b9TOvJYOX2sCNIQHtRStKTxRY1FX7LhpJT4Gw==} engines: {node: 
^16.0.0 || >=18.0.0} peerDependencies: eslint: ^7.0.0 || ^8.0.0 @@ -6351,12 +6476,12 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/scope-manager': 6.4.0 - '@typescript-eslint/types': 6.4.0 - '@typescript-eslint/typescript-estree': 6.4.0(typescript@4.9.5) - '@typescript-eslint/visitor-keys': 6.4.0 + '@typescript-eslint/scope-manager': 6.9.0 + '@typescript-eslint/types': 6.9.0 + '@typescript-eslint/typescript-estree': 6.9.0(typescript@4.9.5) + '@typescript-eslint/visitor-keys': 6.9.0 debug: 4.3.4(supports-color@8.1.1) - eslint: 7.32.0 + eslint: 8.52.0 typescript: 4.9.5 transitivePeerDependencies: - supports-color @@ -6370,16 +6495,16 @@ packages: '@typescript-eslint/visitor-keys': 5.55.0 dev: true - /@typescript-eslint/scope-manager@6.4.0: - resolution: {integrity: sha512-TUS7vaKkPWDVvl7GDNHFQMsMruD+zhkd3SdVW0d7b+7Zo+bd/hXJQ8nsiUZMi1jloWo6c9qt3B7Sqo+flC1nig==} + /@typescript-eslint/scope-manager@6.9.0: + resolution: {integrity: sha512-1R8A9Mc39n4pCCz9o79qRO31HGNDvC7UhPhv26TovDsWPBDx+Sg3rOZdCELIA3ZmNoWAuxaMOT7aWtGRSYkQxw==} engines: {node: ^16.0.0 || >=18.0.0} dependencies: - '@typescript-eslint/types': 6.4.0 - '@typescript-eslint/visitor-keys': 6.4.0 + '@typescript-eslint/types': 6.9.0 + '@typescript-eslint/visitor-keys': 6.9.0 dev: true - /@typescript-eslint/type-utils@6.4.0(eslint@7.32.0)(typescript@4.9.5): - resolution: {integrity: sha512-TvqrUFFyGY0cX3WgDHcdl2/mMCWCDv/0thTtx/ODMY1QhEiyFtv/OlLaNIiYLwRpAxAtOLOY9SUf1H3Q3dlwAg==} + /@typescript-eslint/type-utils@6.9.0(eslint@8.52.0)(typescript@4.9.5): + resolution: {integrity: sha512-XXeahmfbpuhVbhSOROIzJ+b13krFmgtc4GlEuu1WBT+RpyGPIA4Y/eGnXzjbDj5gZLzpAXO/sj+IF/x2GtTMjQ==} engines: {node: ^16.0.0 || >=18.0.0} peerDependencies: eslint: ^7.0.0 || ^8.0.0 @@ -6388,10 +6513,10 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/typescript-estree': 6.4.0(typescript@4.9.5) - '@typescript-eslint/utils': 6.4.0(eslint@7.32.0)(typescript@4.9.5) + 
'@typescript-eslint/typescript-estree': 6.9.0(typescript@4.9.5) + '@typescript-eslint/utils': 6.9.0(eslint@8.52.0)(typescript@4.9.5) debug: 4.3.4(supports-color@8.1.1) - eslint: 7.32.0 + eslint: 8.52.0 ts-api-utils: 1.0.2(typescript@4.9.5) typescript: 4.9.5 transitivePeerDependencies: @@ -6403,8 +6528,8 @@ packages: engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dev: true - /@typescript-eslint/types@6.4.0: - resolution: {integrity: sha512-+FV9kVFrS7w78YtzkIsNSoYsnOtrYVnKWSTVXoL1761CsCRv5wpDOINgsXpxD67YCLZtVQekDDyaxfjVWUJmmg==} + /@typescript-eslint/types@6.9.0: + resolution: {integrity: sha512-+KB0lbkpxBkBSiVCuQvduqMJy+I1FyDbdwSpM3IoBS7APl4Bu15lStPjgBIdykdRqQNYqYNMa8Kuidax6phaEw==} engines: {node: ^16.0.0 || >=18.0.0} dev: true @@ -6429,8 +6554,8 @@ packages: - supports-color dev: true - /@typescript-eslint/typescript-estree@6.4.0(typescript@4.9.5): - resolution: {integrity: sha512-iDPJArf/K2sxvjOR6skeUCNgHR/tCQXBsa+ee1/clRKr3olZjZ/dSkXPZjG6YkPtnW6p5D1egeEPMCW6Gn4yLA==} + /@typescript-eslint/typescript-estree@6.9.0(typescript@4.9.5): + resolution: {integrity: sha512-NJM2BnJFZBEAbCfBP00zONKXvMqihZCrmwCaik0UhLr0vAgb6oguXxLX1k00oQyD+vZZ+CJn3kocvv2yxm4awQ==} engines: {node: ^16.0.0 || >=18.0.0} peerDependencies: typescript: '*' @@ -6438,8 +6563,8 @@ packages: typescript: optional: true dependencies: - '@typescript-eslint/types': 6.4.0 - '@typescript-eslint/visitor-keys': 6.4.0 + '@typescript-eslint/types': 6.9.0 + '@typescript-eslint/visitor-keys': 6.9.0 debug: 4.3.4(supports-color@8.1.1) globby: 11.1.0 is-glob: 4.0.3 @@ -6450,19 +6575,19 @@ packages: - supports-color dev: true - /@typescript-eslint/utils@5.55.0(eslint@7.32.0)(typescript@4.9.5): + /@typescript-eslint/utils@5.55.0(eslint@8.52.0)(typescript@4.9.5): resolution: {integrity: sha512-FkW+i2pQKcpDC3AY6DU54yl8Lfl14FVGYDgBTyGKB75cCwV3KpkpTMFi9d9j2WAJ4271LR2HeC5SEWF/CZmmfw==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} peerDependencies: eslint: ^6.0.0 || ^7.0.0 || ^8.0.0 dependencies: - 
'@eslint-community/eslint-utils': 4.4.0(eslint@7.32.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@8.52.0) '@types/json-schema': 7.0.12 '@types/semver': 7.5.0 '@typescript-eslint/scope-manager': 5.55.0 '@typescript-eslint/types': 5.55.0 '@typescript-eslint/typescript-estree': 5.55.0(typescript@4.9.5) - eslint: 7.32.0 + eslint: 8.52.0 eslint-scope: 5.1.1 semver: 7.5.4 transitivePeerDependencies: @@ -6470,19 +6595,19 @@ packages: - typescript dev: true - /@typescript-eslint/utils@6.4.0(eslint@7.32.0)(typescript@4.9.5): - resolution: {integrity: sha512-BvvwryBQpECPGo8PwF/y/q+yacg8Hn/2XS+DqL/oRsOPK+RPt29h5Ui5dqOKHDlbXrAeHUTnyG3wZA0KTDxRZw==} + /@typescript-eslint/utils@6.9.0(eslint@8.52.0)(typescript@4.9.5): + resolution: {integrity: sha512-5Wf+Jsqya7WcCO8me504FBigeQKVLAMPmUzYgDbWchINNh1KJbxCgVya3EQ2MjvJMVeXl3pofRmprqX6mfQkjQ==} engines: {node: ^16.0.0 || >=18.0.0} peerDependencies: eslint: ^7.0.0 || ^8.0.0 dependencies: - '@eslint-community/eslint-utils': 4.4.0(eslint@7.32.0) + '@eslint-community/eslint-utils': 4.4.0(eslint@8.52.0) '@types/json-schema': 7.0.12 '@types/semver': 7.5.0 - '@typescript-eslint/scope-manager': 6.4.0 - '@typescript-eslint/types': 6.4.0 - '@typescript-eslint/typescript-estree': 6.4.0(typescript@4.9.5) - eslint: 7.32.0 + '@typescript-eslint/scope-manager': 6.9.0 + '@typescript-eslint/types': 6.9.0 + '@typescript-eslint/typescript-estree': 6.9.0(typescript@4.9.5) + eslint: 8.52.0 semver: 7.5.4 transitivePeerDependencies: - supports-color @@ -6497,14 +6622,18 @@ packages: eslint-visitor-keys: 3.4.3 dev: true - /@typescript-eslint/visitor-keys@6.4.0: - resolution: {integrity: sha512-yJSfyT+uJm+JRDWYRYdCm2i+pmvXJSMtPR9Cq5/XQs4QIgNoLcoRtDdzsLbLsFM/c6um6ohQkg/MLxWvoIndJA==} + /@typescript-eslint/visitor-keys@6.9.0: + resolution: {integrity: sha512-dGtAfqjV6RFOtIP8I0B4ZTBRrlTT8NHHlZZSchQx3qReaoDeXhYM++M4So2AgFK9ZB0emRPA6JI1HkafzA2Ibg==} engines: {node: ^16.0.0 || >=18.0.0} dependencies: - '@typescript-eslint/types': 6.4.0 + 
'@typescript-eslint/types': 6.9.0 eslint-visitor-keys: 3.4.3 dev: true + /@ungap/structured-clone@1.2.0: + resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} + dev: true + /@webassemblyjs/ast@1.11.6: resolution: {integrity: sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q==} dependencies: @@ -6676,7 +6805,7 @@ packages: esbuild: '>=0.10.0' dependencies: esbuild: 0.18.20 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /@yarnpkg/fslib@2.10.3: @@ -6735,6 +6864,14 @@ packages: acorn: 7.4.1 dev: true + /acorn-jsx@5.3.2(acorn@8.10.0): + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + dependencies: + acorn: 8.10.0 + dev: true + /acorn-walk@7.2.0: resolution: {integrity: sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==} engines: {node: '>=0.4.0'} @@ -6996,7 +7133,6 @@ packages: /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - dev: false /aria-hidden@1.2.1(@types/react@16.14.34)(react@16.14.0): resolution: {integrity: sha512-PN344VAf9j1EAi+jyVHOJ8XidQdPVssGco39eNcsGdM4wcsILtxrKLkbuiMfLWYROK1FjRQasMWCBttrhjnr6A==} @@ -7017,6 +7153,18 @@ packages: dependencies: deep-equal: 2.1.0 + /arr-union@3.1.0: + resolution: {integrity: sha512-sKpyeERZ02v1FeCZT8lrfJq5u6goHCtpTAzPwJYe7c8SPFOboNjNg1vz2L4VTn9T4PQxEx13TbXLmYUcS6Ug7Q==} + engines: {node: '>=0.10.0'} + dev: false + + /array-buffer-byte-length@1.0.0: + resolution: {integrity: sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==} + dependencies: + call-bind: 1.0.5 + is-array-buffer: 3.0.2 + dev: true + /array-flatten@1.1.1: resolution: {integrity: 
sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} @@ -7054,9 +7202,9 @@ packages: resolution: {integrity: sha512-kDdugMl7id9COE8R7MHF5jWk7Dqt/fs4Pv+JXoICnYwqpjjjbUurz6w5fT5IG6brLdJhv6/VoHB0H7oyIBXd+Q==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.4 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 es-array-method-boxes-properly: 1.0.0 is-string: 1.0.7 dev: true @@ -7071,6 +7219,19 @@ packages: get-intrinsic: 1.1.3 dev: true + /arraybuffer.prototype.slice@1.0.2: + resolution: {integrity: sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==} + engines: {node: '>= 0.4'} + dependencies: + array-buffer-byte-length: 1.0.0 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + get-intrinsic: 1.2.2 + is-array-buffer: 3.0.2 + is-shared-array-buffer: 1.0.2 + dev: true + /asap@2.0.6: resolution: {integrity: sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==} dev: false @@ -7094,6 +7255,11 @@ packages: util: 0.12.5 dev: true + /assign-symbols@1.0.0: + resolution: {integrity: sha512-Q+JC7Whu8HhmTdBph/Tq59IoRtoy6KAm5zzPv00WdujX82lbAL8K7WVjne7vdCsAmbF4AYaDOPyO3k0kl8qIrw==} + engines: {node: '>=0.10.0'} + dev: false + /ast-metadata-inferer@0.8.0: resolution: {integrity: sha512-jOMKcHht9LxYIEQu+RVd22vtgrPaVCtDRQ/16IGmurdzxvYbDd5ynxjnyrzLnieG96eTcAyaoj/wN/4/1FyyeA==} dependencies: @@ -7104,21 +7270,21 @@ packages: resolution: {integrity: sha512-O0yuUDnZeQDL+ncNGlJ78BiO4jnYI3bvMsD5prT0/nsgijG/LpNBIr63gTjVTNsiGkgQhiyCShTgxt8oXOrklA==} engines: {node: '>=4'} dependencies: - tslib: 2.4.1 + tslib: 2.6.2 dev: true /ast-types@0.15.2: resolution: {integrity: sha512-c27loCv9QkZinsa5ProX751khO9DJl/AcB5c2KNtA6NRvHKS0PgLfcftz72KVq504vB0Gku5s2kUZzDBvQWvHg==} engines: {node: '>=4'} dependencies: - tslib: 2.4.1 + tslib: 2.6.2 dev: true /ast-types@0.16.1: resolution: 
{integrity: sha512-6t10qk83GOG8p0vKmaCr8eiilZwO171AvbROMtvvNiwrTly62t+7XkA8RdIIVbpMhCASAsxgAzdRSwh6nw/5Dg==} engines: {node: '>=4'} dependencies: - tslib: 2.4.1 + tslib: 2.6.2 dev: true /astral-regex@2.0.0: @@ -7138,6 +7304,12 @@ packages: resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==} dev: true + /asynciterator.prototype@1.0.0: + resolution: {integrity: sha512-wwHYEIS0Q80f5mosx3L/dfG5t5rjEa9Ft51GTaNt862EnpyGHpgz2RkZvLPp1oF5TnAiTohkEKVEu8pQPJI7Vg==} + dependencies: + has-symbols: 1.0.3 + dev: true + /asynckit@0.4.0: resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==} dev: true @@ -7559,6 +7731,13 @@ packages: ieee754: 1.2.1 dev: true + /bundle-name@3.0.0: + resolution: {integrity: sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw==} + engines: {node: '>=12'} + dependencies: + run-applescript: 5.0.0 + dev: true + /bytes@3.0.0: resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} engines: {node: '>= 0.8'} @@ -7568,6 +7747,19 @@ packages: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} + /bytewise-core@1.2.3: + resolution: {integrity: sha512-nZD//kc78OOxeYtRlVk8/zXqTB4gf/nlguL1ggWA8FuchMyOxcyHR4QPQZMUmA7czC+YnaBrPUCubqAWe50DaA==} + dependencies: + typewise-core: 1.2.0 + dev: false + + /bytewise@1.1.0: + resolution: {integrity: sha512-rHuuseJ9iQ0na6UDhnrRVDh8YnWVlU6xM3VH6q/+yHDeUH2zIhUzP+2/h3LIrhLDBtTqzWpE3p3tP/boefskKQ==} + dependencies: + bytewise-core: 1.2.3 + typewise: 1.0.3 + dev: false + /c8@7.14.0: resolution: {integrity: sha512-i04rtkkcNcCf7zsQcSv/T9EbUn4RXQ6mropeMcjFOsQXQ0iGLAr/xT6TImQg4+U9hmNpN9XdvPkjUL1IzbgxJw==} engines: {node: '>=10.12.0'} @@ -7606,7 +7798,14 @@ packages: resolution: {integrity: 
sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==} dependencies: function-bind: 1.1.1 - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 + + /call-bind@1.0.5: + resolution: {integrity: sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==} + dependencies: + function-bind: 1.1.2 + get-intrinsic: 1.2.2 + set-function-length: 1.1.1 /caller-callsite@2.0.0: resolution: {integrity: sha512-JuG3qI4QOftFsZyOn1qq87fq5grLIyk1JYd5lJmdA+fG7aQ9pA/i3JIJGcO3q0MrRcHlOt1U+ZeHW8Dq9axALQ==} @@ -7636,7 +7835,7 @@ packages: resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} dependencies: pascal-case: 3.1.2 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /camelcase-keys@7.0.2: @@ -8919,6 +9118,16 @@ packages: untildify: 4.0.0 dev: true + /default-browser@4.0.0: + resolution: {integrity: sha512-wX5pXO1+BrhMkSbROFsyxUm0i/cJEScyNhA4PPxc41ICuv05ZZB/MX28s8aZx6xjmatvebIapF6hLEKEcpneUA==} + engines: {node: '>=14.16'} + dependencies: + bundle-name: 3.0.0 + default-browser-id: 3.0.0 + execa: 7.2.0 + titleize: 3.0.0 + dev: true + /default-require-extensions@3.0.1: resolution: {integrity: sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==} engines: {node: '>=8'} @@ -8932,17 +9141,39 @@ packages: clone: 1.0.4 dev: true + /define-data-property@1.1.1: + resolution: {integrity: sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.2.2 + gopd: 1.0.1 + has-property-descriptors: 1.0.0 + /define-lazy-prop@2.0.0: resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} engines: {node: '>=8'} dev: true + /define-lazy-prop@3.0.0: + resolution: {integrity: sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} + 
engines: {node: '>=12'} + dev: true + /define-properties@1.1.4: resolution: {integrity: sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==} engines: {node: '>= 0.4'} dependencies: has-property-descriptors: 1.0.0 object-keys: 1.1.1 + dev: true + + /define-properties@1.2.1: + resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} + engines: {node: '>= 0.4'} + dependencies: + define-data-property: 1.1.1 + has-property-descriptors: 1.0.0 + object-keys: 1.1.1 /defu@6.1.2: resolution: {integrity: sha512-+uO4+qr7msjNNWKYPHqN/3+Dx3NFkmIzayk2L1MyZQlvgZb/J1A0fo410dpKrN2SnqFjt8n4JL8fDJE0wIgjFQ==} @@ -9167,7 +9398,7 @@ packages: resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} dependencies: no-case: 3.0.4 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /dot-prop@5.3.0: @@ -9196,6 +9427,10 @@ packages: stream-shift: 1.0.1 dev: true + /earcut@2.2.4: + resolution: {integrity: sha512-/pjZsA1b4RPHbeWZQn66SWS8nZZWLQQ23oE3Eam7aroEFGEvwKAsJfZ9ytiEMycfzXWpca4FA9QIOehf7PocBQ==} + dev: false + /eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} dev: true @@ -9331,12 +9566,12 @@ packages: es-to-primitive: 1.2.1 function-bind: 1.1.1 function.prototype.name: 1.1.5 - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 get-symbol-description: 1.0.0 has: 1.0.3 has-property-descriptors: 1.0.0 has-symbols: 1.0.3 - internal-slot: 1.0.3 + internal-slot: 1.0.6 is-callable: 1.2.7 is-negative-zero: 2.0.2 is-regex: 1.1.4 @@ -9353,6 +9588,51 @@ packages: unbox-primitive: 1.0.2 dev: true + /es-abstract@1.22.3: + resolution: {integrity: sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==} + engines: {node: '>= 0.4'} + dependencies: + array-buffer-byte-length: 1.0.0 + arraybuffer.prototype.slice: 1.0.2 + 
available-typed-arrays: 1.0.5 + call-bind: 1.0.5 + es-set-tostringtag: 2.0.2 + es-to-primitive: 1.2.1 + function.prototype.name: 1.1.6 + get-intrinsic: 1.2.2 + get-symbol-description: 1.0.0 + globalthis: 1.0.3 + gopd: 1.0.1 + has-property-descriptors: 1.0.0 + has-proto: 1.0.1 + has-symbols: 1.0.3 + hasown: 2.0.0 + internal-slot: 1.0.6 + is-array-buffer: 3.0.2 + is-callable: 1.2.7 + is-negative-zero: 2.0.2 + is-regex: 1.1.4 + is-shared-array-buffer: 1.0.2 + is-string: 1.0.7 + is-typed-array: 1.1.12 + is-weakref: 1.0.2 + object-inspect: 1.13.1 + object-keys: 1.1.1 + object.assign: 4.1.4 + regexp.prototype.flags: 1.5.1 + safe-array-concat: 1.0.1 + safe-regex-test: 1.0.0 + string.prototype.trim: 1.2.8 + string.prototype.trimend: 1.0.7 + string.prototype.trimstart: 1.0.7 + typed-array-buffer: 1.0.0 + typed-array-byte-length: 1.0.0 + typed-array-byte-offset: 1.0.0 + typed-array-length: 1.0.4 + unbox-primitive: 1.0.2 + which-typed-array: 1.1.13 + dev: true + /es-array-method-boxes-properly@1.0.0: resolution: {integrity: sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==} dev: true @@ -9360,8 +9640,8 @@ packages: /es-get-iterator@1.1.2: resolution: {integrity: sha512-+DTO8GYwbMCwbywjimwZMHp8AuYXOS2JZFWoi2AlPOS3ebnII9w/NLpNZtA7A0YLaVDw+O7KFCeoIV7OPvM7hQ==} dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 + call-bind: 1.0.5 + get-intrinsic: 1.2.2 has-symbols: 1.0.3 is-arguments: 1.1.1 is-map: 2.0.2 @@ -9369,10 +9649,38 @@ packages: is-string: 1.0.7 isarray: 2.0.5 + /es-iterator-helpers@1.0.15: + resolution: {integrity: sha512-GhoY8uYqd6iwUl2kgjTm4CZAf6oo5mHK7BPqx3rKgx893YSsy0LGHV6gfqqQvZt/8xM8xeOnfXBCfqclMKkJ5g==} + dependencies: + asynciterator.prototype: 1.0.0 + call-bind: 1.0.2 + define-properties: 1.2.1 + es-abstract: 1.22.3 + es-set-tostringtag: 2.0.2 + function-bind: 1.1.1 + get-intrinsic: 1.2.2 + globalthis: 1.0.3 + has-property-descriptors: 1.0.0 + has-proto: 1.0.1 + has-symbols: 1.0.3 + internal-slot: 1.0.6 + 
iterator.prototype: 1.1.2 + safe-array-concat: 1.0.1 + dev: true + /es-module-lexer@1.3.0: resolution: {integrity: sha512-vZK7T0N2CBmBOixhmjdqx2gWVbFZ4DXZ/NyRMZVlJXPa7CyFS+/a4QQsDGDQy9ZfEzxFuNEsMLeQJnKP2p5/JA==} dev: true + /es-set-tostringtag@2.0.2: + resolution: {integrity: sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.2.2 + has-tostringtag: 1.0.0 + hasown: 2.0.0 + dev: true + /es-shim-unscopables@1.0.0: resolution: {integrity: sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w==} dependencies: @@ -9682,16 +9990,16 @@ packages: source-map: 0.6.1 dev: true - /eslint-config-prettier@8.8.0(eslint@7.32.0): - resolution: {integrity: sha512-wLbQiFre3tdGgpDv67NQKnJuTlcUVYHas3k+DZCc2U2BadthoEY4B7hLPvAxaqdyOGCzuLfii2fqGph10va7oA==} + /eslint-config-prettier@9.0.0(eslint@8.52.0): + resolution: {integrity: sha512-IcJsTkJae2S35pRsRAwoCE+925rJJStOdkKnLVgtE+tEpqU0EVVM7OqrwxqgptKdX29NUwC82I5pXsGFIgSevw==} hasBin: true peerDependencies: eslint: '>=7.0.0' dependencies: - eslint: 7.32.0 + eslint: 8.52.0 dev: true - /eslint-plugin-compat@4.2.0(eslint@7.32.0): + /eslint-plugin-compat@4.2.0(eslint@8.52.0): resolution: {integrity: sha512-RDKSYD0maWy5r7zb5cWQS+uSPc26mgOzdORJ8hxILmWM7S/Ncwky7BcAtXVY5iRbKjBdHsWU8Yg7hfoZjtkv7w==} engines: {node: '>=14.x'} peerDependencies: @@ -9701,34 +10009,34 @@ packages: ast-metadata-inferer: 0.8.0 browserslist: 4.21.10 caniuse-lite: 1.0.30001538 - eslint: 7.32.0 + eslint: 8.52.0 find-up: 5.0.0 lodash.memoize: 4.1.2 semver: 7.5.4 dev: true - /eslint-plugin-cypress@2.13.3(eslint@7.32.0): - resolution: {integrity: sha512-nAPjZE5WopCsgJwl3vHm5iafpV+ZRO76Z9hMyRygWhmg5ODXDPd+9MaPl7kdJ2azj+sO87H3P1PRnggIrz848g==} + /eslint-plugin-cypress@2.15.1(eslint@8.52.0): + resolution: {integrity: sha512-eLHLWP5Q+I4j2AWepYq0PgFEei9/s5LvjuSqWrxurkg1YZ8ltxdvMNmdSf0drnsNo57CTgYY/NIHHLRSWejR7w==} peerDependencies: 
eslint: '>= 3.2.1' dependencies: - eslint: 7.32.0 - globals: 11.12.0 + eslint: 8.52.0 + globals: 13.23.0 dev: true - /eslint-plugin-eslint-comments@3.2.0(eslint@7.32.0): + /eslint-plugin-eslint-comments@3.2.0(eslint@8.52.0): resolution: {integrity: sha512-0jkOl0hfojIHHmEHgmNdqv4fmh7300NdpA9FFpF7zaoLvB/QeXOGNLIo86oAveJFrfB1p05kC8hpEMHM8DwWVQ==} engines: {node: '>=6.5.0'} peerDependencies: eslint: '>=4.19.1' dependencies: escape-string-regexp: 1.0.5 - eslint: 7.32.0 + eslint: 8.52.0 ignore: 5.2.4 dev: true - /eslint-plugin-jest@27.2.3(@typescript-eslint/eslint-plugin@6.4.0)(eslint@7.32.0)(jest@29.3.1)(typescript@4.9.5): - resolution: {integrity: sha512-sRLlSCpICzWuje66Gl9zvdF6mwD5X86I4u55hJyFBsxYOsBCmT5+kSUjf+fkFWVMMgpzNEupjW8WzUqi83hJAQ==} + /eslint-plugin-jest@27.4.3(@typescript-eslint/eslint-plugin@6.9.0)(eslint@8.52.0)(jest@29.3.1)(typescript@4.9.5): + resolution: {integrity: sha512-7S6SmmsHsgIm06BAGCAxL+ABd9/IB3MWkz2pudj6Qqor2y1qQpWPfuFU4SG9pWj4xDjF0e+D7Llh5useuSzAZw==} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} peerDependencies: '@typescript-eslint/eslint-plugin': ^5.0.0 || ^6.0.0 @@ -9740,9 +10048,9 @@ packages: jest: optional: true dependencies: - '@typescript-eslint/eslint-plugin': 6.4.0(@typescript-eslint/parser@6.4.0)(eslint@7.32.0)(typescript@4.9.5) - '@typescript-eslint/utils': 5.55.0(eslint@7.32.0)(typescript@4.9.5) - eslint: 7.32.0 + '@typescript-eslint/eslint-plugin': 6.9.0(@typescript-eslint/parser@6.9.0)(eslint@8.52.0)(typescript@4.9.5) + '@typescript-eslint/utils': 5.55.0(eslint@8.52.0)(typescript@4.9.5) + eslint: 8.52.0 jest: 29.3.1(@types/node@18.11.9)(ts-node@10.9.1) transitivePeerDependencies: - supports-color @@ -9754,25 +10062,29 @@ packages: engines: {node: '>=5.0.0'} dev: true - /eslint-plugin-prettier@3.4.1(eslint-config-prettier@8.8.0)(eslint@7.32.0)(prettier@2.8.8): - resolution: {integrity: sha512-htg25EUYUeIhKHXjOinK4BgCcDwtLHjqaxCDsMy5nbnUMkKFvIhMVCp+5GFUXQ4Nr8lBsPqtGAqBenbpFqAA2g==} - engines: {node: '>=6.0.0'} + 
/eslint-plugin-prettier@5.0.1(eslint-config-prettier@9.0.0)(eslint@8.52.0)(prettier@2.8.8): + resolution: {integrity: sha512-m3u5RnR56asrwV/lDC4GHorlW75DsFfmUcjfCYylTUs85dBRnB7VM6xG8eCMJdeDRnppzmxZVf1GEPJvl1JmNg==} + engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: - eslint: '>=5.0.0' + '@types/eslint': '>=8.0.0' + eslint: '>=8.0.0' eslint-config-prettier: '*' - prettier: '>=1.13.0' + prettier: '>=3.0.0' peerDependenciesMeta: + '@types/eslint': + optional: true eslint-config-prettier: optional: true dependencies: - eslint: 7.32.0 - eslint-config-prettier: 8.8.0(eslint@7.32.0) + eslint: 8.52.0 + eslint-config-prettier: 9.0.0(eslint@8.52.0) prettier: 2.8.8 prettier-linter-helpers: 1.0.0 + synckit: 0.8.5 dev: true - /eslint-plugin-react@7.32.2(eslint@7.32.0): - resolution: {integrity: sha512-t2fBMa+XzonrrNkyVirzKlvn5RXzzPwRHtMvLAtVZrt8oxgnTQaYbU6SXTOO1mwQgp1y5+toMSKInnzGr0Knqg==} + /eslint-plugin-react@7.33.2(eslint@8.52.0): + resolution: {integrity: sha512-73QQMKALArI8/7xGLNI/3LylrEYrlKZSb5C9+q3OtOewTnMQi5cT+aE9E41sLCmli3I9PGGmD1yiZydyo4FEPw==} engines: {node: '>=4'} peerDependencies: eslint: ^3 || ^4 || ^5 || ^6 || ^7 || ^8 @@ -9781,7 +10093,8 @@ packages: array.prototype.flatmap: 1.3.1 array.prototype.tosorted: 1.1.1 doctrine: 2.1.0 - eslint: 7.32.0 + es-iterator-helpers: 1.0.15 + eslint: 8.52.0 estraverse: 5.3.0 jsx-ast-utils: 3.3.3 minimatch: 3.1.2 @@ -9795,15 +10108,15 @@ packages: string.prototype.matchall: 4.0.8 dev: true - /eslint-plugin-storybook@0.6.15(eslint@7.32.0)(typescript@4.9.5): + /eslint-plugin-storybook@0.6.15(eslint@8.52.0)(typescript@4.9.5): resolution: {integrity: sha512-lAGqVAJGob47Griu29KXYowI4G7KwMoJDOkEip8ujikuDLxU+oWJ1l0WL6F2oDO4QiyUFXvtDkEkISMOPzo+7w==} engines: {node: 12.x || 14.x || >= 16} peerDependencies: eslint: '>=6' dependencies: '@storybook/csf': 0.0.1 - '@typescript-eslint/utils': 5.55.0(eslint@7.32.0)(typescript@4.9.5) - eslint: 7.32.0 + '@typescript-eslint/utils': 5.55.0(eslint@8.52.0)(typescript@4.9.5) + eslint: 8.52.0 
requireindex: 1.2.0 ts-dedent: 2.2.0 transitivePeerDependencies: @@ -9819,21 +10132,12 @@ packages: estraverse: 4.3.0 dev: true - /eslint-utils@2.1.0: - resolution: {integrity: sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==} - engines: {node: '>=6'} + /eslint-scope@7.2.2: + resolution: {integrity: sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: - eslint-visitor-keys: 1.3.0 - dev: true - - /eslint-visitor-keys@1.3.0: - resolution: {integrity: sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==} - engines: {node: '>=4'} - dev: true - - /eslint-visitor-keys@2.1.0: - resolution: {integrity: sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==} - engines: {node: '>=10'} + esrecurse: 4.3.0 + estraverse: 5.3.0 dev: true /eslint-visitor-keys@3.4.3: @@ -9841,62 +10145,60 @@ packages: engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dev: true - /eslint@7.32.0: - resolution: {integrity: sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==} - engines: {node: ^10.12.0 || >=12.0.0} + /eslint@8.52.0: + resolution: {integrity: sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} hasBin: true dependencies: - '@babel/code-frame': 7.12.11 - '@eslint/eslintrc': 0.4.3 - '@humanwhocodes/config-array': 0.5.0 + '@eslint-community/eslint-utils': 4.4.0(eslint@8.52.0) + '@eslint-community/regexpp': 4.6.2 + '@eslint/eslintrc': 2.1.2 + '@eslint/js': 8.52.0 + '@humanwhocodes/config-array': 0.11.13 + '@humanwhocodes/module-importer': 1.0.1 + '@nodelib/fs.walk': 1.2.8 + '@ungap/structured-clone': 1.2.0 ajv: 6.12.6 chalk: 4.1.2 cross-spawn: 7.0.3 debug: 4.3.4(supports-color@8.1.1) doctrine: 3.0.0 - enquirer: 2.3.6 
escape-string-regexp: 4.0.0 - eslint-scope: 5.1.1 - eslint-utils: 2.1.0 - eslint-visitor-keys: 2.1.0 - espree: 7.3.1 - esquery: 1.4.0 + eslint-scope: 7.2.2 + eslint-visitor-keys: 3.4.3 + espree: 9.6.1 + esquery: 1.5.0 esutils: 2.0.3 fast-deep-equal: 3.1.3 file-entry-cache: 6.0.1 - functional-red-black-tree: 1.0.1 - glob-parent: 5.1.2 - globals: 13.17.0 - ignore: 4.0.6 - import-fresh: 3.3.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + globals: 13.23.0 + graphemer: 1.4.0 + ignore: 5.2.4 imurmurhash: 0.1.4 is-glob: 4.0.3 - js-yaml: 3.14.1 + is-path-inside: 3.0.3 + js-yaml: 4.1.0 json-stable-stringify-without-jsonify: 1.0.1 levn: 0.4.1 lodash.merge: 4.6.2 minimatch: 3.1.2 natural-compare: 1.4.0 - optionator: 0.9.1 - progress: 2.0.3 - regexpp: 3.2.0 - semver: 7.5.4 + optionator: 0.9.3 strip-ansi: 6.0.1 - strip-json-comments: 3.1.1 - table: 6.8.1 text-table: 0.2.0 - v8-compile-cache: 2.3.0 transitivePeerDependencies: - supports-color dev: true - /espree@7.3.1: - resolution: {integrity: sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==} - engines: {node: ^10.12.0 || >=12.0.0} + /espree@9.6.1: + resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: - acorn: 7.4.1 - acorn-jsx: 5.3.2(acorn@7.4.1) - eslint-visitor-keys: 1.3.0 + acorn: 8.10.0 + acorn-jsx: 5.3.2(acorn@8.10.0) + eslint-visitor-keys: 3.4.3 dev: true /esprima@4.0.1: @@ -9905,8 +10207,8 @@ packages: hasBin: true dev: true - /esquery@1.4.0: - resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==} + /esquery@1.5.0: + resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} engines: {node: '>=0.10'} dependencies: estraverse: 5.3.0 @@ -9988,6 +10290,21 @@ packages: strip-final-newline: 2.0.0 dev: true + /execa@7.2.0: + 
resolution: {integrity: sha512-UduyVP7TLB5IcAQl+OzLyLcS/l32W/GLg+AhHJ+ow40FOk2U3SAllPwR44v4vmdFwIWqpdwxxpQbF1n5ta9seA==} + engines: {node: ^14.18.0 || ^16.14.0 || >=18.0.0} + dependencies: + cross-spawn: 7.0.3 + get-stream: 6.0.1 + human-signals: 4.3.1 + is-stream: 3.0.0 + merge-stream: 2.0.0 + npm-run-path: 5.1.0 + onetime: 6.0.0 + signal-exit: 3.0.7 + strip-final-newline: 3.0.0 + dev: true + /executable@4.1.1: resolution: {integrity: sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==} engines: {node: '>=4'} @@ -10079,6 +10396,21 @@ packages: transitivePeerDependencies: - supports-color + /extend-shallow@2.0.1: + resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} + engines: {node: '>=0.10.0'} + dependencies: + is-extendable: 0.1.1 + dev: false + + /extend-shallow@3.0.2: + resolution: {integrity: sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q==} + engines: {node: '>=0.10.0'} + dependencies: + assign-symbols: 1.0.0 + is-extendable: 1.0.1 + dev: false + /extend@3.0.2: resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} @@ -10139,6 +10471,17 @@ packages: micromatch: 4.0.5 dev: true + /fast-glob@3.3.1: + resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} + engines: {node: '>=8.6.0'} + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.5 + dev: true + /fast-json-parse@1.0.3: resolution: {integrity: sha512-FRWsaZRWEJ1ESVNbDWmsAlqDk96gPQezzLghafp5J4GUKjbCz3OkAHuZs5TuPEtkbVQERysLp9xv6c24fBm8Aw==} dev: true @@ -10561,18 +10904,27 @@ packages: /function-bind@1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} + /function-bind@1.1.2: 
+ resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + /function.prototype.name@1.1.5: resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.4 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 functions-have-names: 1.2.3 dev: true - /functional-red-black-tree@1.0.1: - resolution: {integrity: sha512-dsKNQNdj6xA3T+QlADDA7mOSlX0qiMINjn0cgr+eGHGsbSHzTabcIogz2+p/iqP1Xs6EP/sS2SbqH+brGTbq0g==} + /function.prototype.name@1.1.6: + resolution: {integrity: sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + functions-have-names: 1.2.3 dev: true /functions-have-names@1.2.3: @@ -10587,6 +10939,10 @@ packages: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + /geojson-vt@3.2.1: + resolution: {integrity: sha512-EvGQQi/zPrDA6zr6BnJD/YhwAkBP8nnJ9emh3EnHQKVMfg/MRVtPbMYdgVy/IaEmn4UfagD2a6fafPDL5hbtwg==} + dev: false + /get-caller-file@2.0.5: resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} engines: {node: 6.* || 8.* || >= 10.*} @@ -10599,7 +10955,15 @@ packages: has: 1.0.3 has-symbols: 1.0.3 - /get-nonce@1.0.1: + /get-intrinsic@1.2.2: + resolution: {integrity: sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==} + dependencies: + function-bind: 1.1.2 + has-proto: 1.0.1 + has-symbols: 1.0.3 + hasown: 2.0.0 + + /get-nonce@1.0.1: resolution: {integrity: sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==} engines: 
{node: '>=6'} dev: true @@ -10638,16 +11002,20 @@ packages: /get-stream@6.0.1: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} engines: {node: '>=10'} - dev: true /get-symbol-description@1.0.0: resolution: {integrity: sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 + call-bind: 1.0.5 + get-intrinsic: 1.2.2 dev: true + /get-value@2.0.6: + resolution: {integrity: sha512-Ln0UQDlxH1BapMu3GPtf7CuYNwRZf2gwCuPqbyG6pB8WfmFpzqcy4xtAaAMUhnNqjMKTiCPZG2oMT3YSx8U2NA==} + engines: {node: '>=0.10.0'} + dev: false + /getos@3.2.1: resolution: {integrity: sha512-U56CfOK17OKgTVqozZjUKNdkfEv6jk5WISBJ8SHoagjE6L69zOwl3Z+O8myjY9MEW3i2HPWQBt/LTbCgcC973Q==} dependencies: @@ -10683,12 +11051,23 @@ packages: resolution: {integrity: sha512-4hYlStsEIaYeYvZTZwgD5yOS2WVP0dcDsOBqeImdEM8eLuclvv0IEMlQQ1kuA5DN4he7wVH1jsYtNe9uininxg==} dev: true + /gl-matrix@3.4.3: + resolution: {integrity: sha512-wcCp8vu8FT22BnvKVPjXa/ICBWRq/zjFfdofZy1WSpQZpphblv12/bOQLBC1rMM7SGOFS9ltVmKOHil5+Ml7gA==} + dev: false + /glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} engines: {node: '>= 6'} dependencies: is-glob: 4.0.3 + /glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + dependencies: + is-glob: 4.0.3 + dev: true + /glob-to-regexp@0.4.1: resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} dev: true @@ -10763,17 +11142,33 @@ packages: which: 1.3.1 dev: true + /global-prefix@3.0.0: + resolution: {integrity: sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==} + engines: {node: '>=6'} + 
dependencies: + ini: 1.3.8 + kind-of: 6.0.3 + which: 1.3.1 + dev: false + /globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} engines: {node: '>=4'} - /globals@13.17.0: - resolution: {integrity: sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw==} + /globals@13.23.0: + resolution: {integrity: sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==} engines: {node: '>=8'} dependencies: type-fest: 0.20.2 dev: true + /globalthis@1.0.3: + resolution: {integrity: sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==} + engines: {node: '>= 0.4'} + dependencies: + define-properties: 1.2.1 + dev: true + /globby@11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} @@ -10792,7 +11187,7 @@ packages: /gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} dependencies: - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 /graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} @@ -10845,7 +11240,11 @@ packages: /has-property-descriptors@1.0.0: resolution: {integrity: sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==} dependencies: - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 + + /has-proto@1.0.1: + resolution: {integrity: sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==} + engines: {node: '>= 0.4'} /has-symbols@1.0.3: resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==} @@ -10861,7 +11260,7 @@ packages: resolution: {integrity: 
sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==} engines: {node: '>= 0.4.0'} dependencies: - function-bind: 1.1.1 + function-bind: 1.1.2 /hasha@5.2.2: resolution: {integrity: sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==} @@ -10871,6 +11270,12 @@ packages: type-fest: 0.8.1 dev: true + /hasown@2.0.0: + resolution: {integrity: sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==} + engines: {node: '>= 0.4'} + dependencies: + function-bind: 1.1.2 + /hast-util-parse-selector@2.2.5: resolution: {integrity: sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ==} @@ -11091,6 +11496,11 @@ packages: engines: {node: '>=10.17.0'} dev: true + /human-signals@4.3.1: + resolution: {integrity: sha512-nZXjEF2nbo7lIw3mgYjItAfgQXog3OjJogSbKa2CQIIvSGWcKgeJnQlNXip6NglNzYH45nSRiEVimMvYL8DDqQ==} + engines: {node: '>=14.18.0'} + dev: true + /humps@2.0.1: resolution: {integrity: sha512-E0eIbrFWUhwfXJmsbdjRQFQPrl5pTEoKlz163j1mTqqUnU9PgR4AgB8AIITzuB3vLBdxZXyZ9TDIrwB2OASz4g==} dev: false @@ -11135,12 +11545,6 @@ packages: /ieee754@1.2.1: resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true - - /ignore@4.0.6: - resolution: {integrity: sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==} - engines: {node: '>= 4'} - dev: true /ignore@5.2.4: resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==} @@ -11214,7 +11618,6 @@ packages: /ini@1.3.8: resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} - dev: true /ini@2.0.0: resolution: {integrity: sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==} @@ -11252,11 +11655,20 @@ 
packages: resolution: {integrity: sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==} engines: {node: '>= 0.4'} dependencies: - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 has: 1.0.3 side-channel: 1.0.4 dev: true + /internal-slot@1.0.6: + resolution: {integrity: sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==} + engines: {node: '>= 0.4'} + dependencies: + get-intrinsic: 1.2.2 + hasown: 2.0.0 + side-channel: 1.0.4 + dev: true + /internmap@1.0.1: resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} dev: false @@ -11310,9 +11722,17 @@ packages: resolution: {integrity: sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 has-tostringtag: 1.0.0 + /is-array-buffer@3.0.2: + resolution: {integrity: sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==} + dependencies: + call-bind: 1.0.5 + get-intrinsic: 1.2.2 + is-typed-array: 1.1.12 + dev: true + /is-arrayish@0.2.1: resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} dev: true @@ -11321,6 +11741,13 @@ packages: resolution: {integrity: sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==} dev: true + /is-async-function@2.0.0: + resolution: {integrity: sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==} + engines: {node: '>= 0.4'} + dependencies: + has-tostringtag: 1.0.0 + dev: true + /is-bigint@1.0.4: resolution: {integrity: sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==} dependencies: @@ -11336,7 +11763,7 @@ packages: resolution: {integrity: 
sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 has-tostringtag: 1.0.0 /is-buffer@1.1.6: @@ -11399,6 +11826,17 @@ packages: hasBin: true dev: true + /is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + dev: true + + /is-extendable@0.1.1: + resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} + engines: {node: '>=0.10.0'} + dev: false + /is-extendable@1.0.1: resolution: {integrity: sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==} engines: {node: '>=0.10.0'} @@ -11410,6 +11848,12 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + /is-finalizationregistry@1.0.2: + resolution: {integrity: sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==} + dependencies: + call-bind: 1.0.5 + dev: true + /is-fullwidth-code-point@2.0.0: resolution: {integrity: sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==} engines: {node: '>=4'} @@ -11446,6 +11890,14 @@ packages: /is-hexadecimal@1.0.4: resolution: {integrity: sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw==} + /is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + dependencies: + is-docker: 3.0.0 + dev: true + /is-installed-globally@0.4.0: resolution: {integrity: 
sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==} engines: {node: '>=10'} @@ -11466,8 +11918,8 @@ packages: resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 + call-bind: 1.0.5 + define-properties: 1.2.1 dev: true /is-negative-zero@2.0.2: @@ -11533,7 +11985,7 @@ packages: resolution: {integrity: sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 has-tostringtag: 1.0.0 /is-regexp@1.0.0: @@ -11551,7 +12003,7 @@ packages: /is-shared-array-buffer@1.0.2: resolution: {integrity: sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 dev: true /is-stream@2.0.1: @@ -11559,6 +12011,11 @@ packages: engines: {node: '>=8'} dev: true + /is-stream@3.0.0: + resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dev: true + /is-string@1.0.7: resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} engines: {node: '>= 0.4'} @@ -11576,10 +12033,17 @@ packages: engines: {node: '>= 0.4'} dependencies: available-typed-arrays: 1.0.5 - call-bind: 1.0.2 + call-bind: 1.0.5 for-each: 0.3.3 gopd: 1.0.1 has-tostringtag: 1.0.0 + dev: true + + /is-typed-array@1.1.12: + resolution: {integrity: sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==} + engines: {node: '>= 0.4'} + dependencies: + which-typed-array: 1.1.13 /is-typedarray@1.0.0: resolution: {integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==} 
@@ -11596,14 +12060,14 @@ packages: /is-weakref@1.0.2: resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 dev: true /is-weakset@2.0.2: resolution: {integrity: sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==} dependencies: - call-bind: 1.0.2 - get-intrinsic: 1.1.3 + call-bind: 1.0.5 + get-intrinsic: 1.2.2 /is-what@3.14.1: resolution: {integrity: sha512-sNxgpk9793nzSs7bA6JQJGeIuRBQhAaNGG77kzYQgMkrID+lS6SlK07K5LaptscDlSaIgH+GPFzf+d75FVxozA==} @@ -11634,7 +12098,6 @@ packages: /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true /isobject@3.0.1: resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} @@ -11721,6 +12184,16 @@ packages: istanbul-lib-report: 3.0.0 dev: true + /iterator.prototype@1.1.2: + resolution: {integrity: sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==} + dependencies: + define-properties: 1.2.1 + get-intrinsic: 1.2.2 + has-symbols: 1.0.3 + reflect.getprototypeof: 1.0.4 + set-function-name: 2.0.1 + dev: true + /jackspeak@2.3.0: resolution: {integrity: sha512-uKmsITSsF4rUWQHzqaRUuyAir3fZfW3f202Ee34lz/gZCi970CPZwyQXLGNgWJvvZbvFyzeyGq0+4fcG/mBKZg==} engines: {node: '>=14'} @@ -12798,6 +13271,13 @@ packages: esprima: 4.0.1 dev: true + /js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + dependencies: + argparse: 2.0.1 + dev: true + /jsbn@0.1.1: resolution: {integrity: sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==} dev: true @@ -12906,6 +13386,10 @@ packages: resolution: {integrity: 
sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} dev: true + /json-stringify-pretty-compact@3.0.0: + resolution: {integrity: sha512-Rc2suX5meI0S3bfdZuA7JMFBGkJ875ApfVyq2WHELjBiiG22My/l7/8zPpH/CfFVQHuVLd8NLR0nv6vi0BYYKA==} + dev: false + /json-stringify-safe@5.0.1: resolution: {integrity: sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==} dev: true @@ -12956,6 +13440,10 @@ packages: object.assign: 4.1.4 dev: true + /kdbush@4.0.2: + resolution: {integrity: sha512-WbCVYJ27Sz8zi9Q7Q0xHC+05iwkm3Znipc2XTlrnJbsHMYktW4hPhXUE8Ys1engBrvffoSCqbil1JQAa7clRpA==} + dev: false + /kea-forms@3.0.3(kea@3.1.5): resolution: {integrity: sha512-ApiirM7K103ULa0hNNcJHiJ0ffvuVIn9Nwg4wsEadfyraV9GLrWVbUeZWW0qFI2zTlkizDplM/gc3gGUfQTs9g==} peerDependencies: @@ -13054,7 +13542,6 @@ packages: /kind-of@6.0.3: resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} engines: {node: '>=0.10.0'} - dev: true /kleur@3.0.3: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} @@ -13301,10 +13788,6 @@ packages: resolution: {integrity: sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==} dev: true - /lodash.truncate@4.4.2: - resolution: {integrity: sha512-jttmRe7bRse52OsWIMDLaXxWqRAmtIUccAQ3garviCqJjafXOfNMO0yMfNpdD6zbGaTU0P5Nz7e7gAT6cKmJRw==} - dev: true - /lodash.uniq@4.5.0: resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} dev: true @@ -13343,7 +13826,7 @@ packages: /lower-case@2.0.2: resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} dependencies: - tslib: 2.4.1 + tslib: 2.6.2 dev: true /lowlight@1.20.0: @@ -13406,6 +13889,37 @@ packages: resolution: {integrity: 
sha512-0aF7ZmVon1igznGI4VS30yugpduQW3y3GkcgGJOp7d8x8QrizhigUxjI/m2UojsXXto+jLAH3KSz+xOJTiORjg==} dev: true + /maplibre-gl@3.5.1: + resolution: {integrity: sha512-XFpqAKjpm7Y6cV3B1MDZ3FGUCXyrfeM2QkXloKc4x2QK9/e6/BEHdVebtxXcTrwdzpQexKrMqzdYCbaobJRNrw==} + engines: {node: '>=16.14.0', npm: '>=8.1.0'} + dependencies: + '@mapbox/geojson-rewind': 0.5.2 + '@mapbox/jsonlint-lines-primitives': 2.0.2 + '@mapbox/point-geometry': 0.1.0 + '@mapbox/tiny-sdf': 2.0.6 + '@mapbox/unitbezier': 0.0.1 + '@mapbox/vector-tile': 1.3.1 + '@mapbox/whoots-js': 3.1.0 + '@maplibre/maplibre-gl-style-spec': 19.3.3 + '@types/geojson': 7946.0.12 + '@types/mapbox__point-geometry': 0.1.3 + '@types/mapbox__vector-tile': 1.3.3 + '@types/pbf': 3.0.4 + '@types/supercluster': 7.1.2 + earcut: 2.2.4 + geojson-vt: 3.2.1 + gl-matrix: 3.4.3 + global-prefix: 3.0.0 + kdbush: 4.0.2 + murmurhash-js: 1.0.0 + pbf: 3.2.1 + potpack: 2.0.0 + quickselect: 2.0.0 + supercluster: 8.0.1 + tinyqueue: 2.0.3 + vt-pbf: 3.1.3 + dev: false + /markdown-it@13.0.1: resolution: {integrity: sha512-lTlxriVoy2criHP0JKRhO2VDG9c2ypWCsT237eDiLqi09rmbKoUetyGHq2uOIRoRS//kfoJckS0eUzzkDR+k2Q==} hasBin: true @@ -13561,6 +14075,11 @@ packages: engines: {node: '>=6'} dev: true + /mimic-fn@4.0.0: + resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} + engines: {node: '>=12'} + dev: true + /min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==} engines: {node: '>=4'} @@ -13588,7 +14107,6 @@ packages: /minimist@1.2.8: resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} - dev: true /minipass@3.3.6: resolution: {integrity: sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==} @@ -13725,6 +14243,10 @@ packages: object-assign: 4.1.1 dev: false + /murmurhash-js@1.0.0: + resolution: {integrity: 
sha512-TvmkNhkv8yct0SVBSy+o8wYzXjE4Zz3PCesbfs8HiCXXdcTuocApFv11UWlNFWKYsP2okqrhb7JNlSm9InBhIw==} + dev: false + /mute-stream@0.0.8: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} dev: true @@ -13797,7 +14319,7 @@ packages: resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} dependencies: lower-case: 2.0.2 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /node-abort-controller@3.1.1: @@ -13870,6 +14392,13 @@ packages: path-key: 3.1.1 dev: true + /npm-run-path@5.1.0: + resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + path-key: 4.0.0 + dev: true + /nth-check@1.0.2: resolution: {integrity: sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg==} dependencies: @@ -13929,12 +14458,16 @@ packages: /object-inspect@1.12.2: resolution: {integrity: sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==} + /object-inspect@1.13.1: + resolution: {integrity: sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==} + dev: true + /object-is@1.1.5: resolution: {integrity: sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw==} engines: {node: '>= 0.4'} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 + call-bind: 1.0.5 + define-properties: 1.2.1 /object-keys@1.1.1: resolution: {integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} @@ -13945,7 +14478,7 @@ packages: engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 - define-properties: 1.1.4 + define-properties: 1.2.1 has-symbols: 1.0.3 object-keys: 1.1.1 @@ -13972,9 +14505,9 @@ packages: engines: {node: '>= 0.8'} dependencies: 
array.prototype.reduce: 1.0.5 - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.4 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 dev: true /object.hasown@1.1.2: @@ -14035,6 +14568,13 @@ packages: mimic-fn: 2.1.0 dev: true + /onetime@6.0.0: + resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} + engines: {node: '>=12'} + dependencies: + mimic-fn: 4.0.0 + dev: true + /open@8.4.2: resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} engines: {node: '>=12'} @@ -14044,16 +14584,26 @@ packages: is-wsl: 2.2.0 dev: true - /optionator@0.9.1: - resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} + /open@9.1.0: + resolution: {integrity: sha512-OS+QTnw1/4vrf+9hh1jc1jnYjzSG4ttTBB8UxOwAnInG3Uo4ssetzC1ihqaIHjLJnA5GGlRl6QlZXOTQhRBUvg==} + engines: {node: '>=14.16'} + dependencies: + default-browser: 4.0.0 + define-lazy-prop: 3.0.0 + is-inside-container: 1.0.0 + is-wsl: 2.2.0 + dev: true + + /optionator@0.9.3: + resolution: {integrity: sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==} engines: {node: '>= 0.8.0'} dependencies: + '@aashutoshrathi/word-wrap': 1.2.6 deep-is: 0.1.4 fast-levenshtein: 2.0.6 levn: 0.4.1 prelude-ls: 1.2.1 type-check: 0.4.0 - word-wrap: 1.2.4 dev: true /ora@5.4.1: @@ -14183,7 +14733,7 @@ packages: resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} dependencies: dot-case: 3.0.4 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /parent-module@1.0.1: @@ -14245,7 +14795,7 @@ packages: resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} dependencies: no-case: 3.0.4 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /path-browserify@1.0.1: @@ -14277,6 +14827,11 @@ 
packages: engines: {node: '>=8'} dev: true + /path-key@4.0.0: + resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} + engines: {node: '>=12'} + dev: true + /path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} @@ -14304,6 +14859,14 @@ packages: resolution: {integrity: sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q==} dev: true + /pbf@3.2.1: + resolution: {integrity: sha512-ClrV7pNOn7rtmoQVF4TS1vyU0WhYRnP92fzbfF75jAIwpnzdJXf8iTd4CMEqO4yUenH6NDqLiwjqlh6QgZzgLQ==} + hasBin: true + dependencies: + ieee754: 1.2.1 + resolve-protobuf-schema: 2.1.0 + dev: false + /peek-stream@1.1.3: resolution: {integrity: sha512-FhJ+YbOSBb9/rIl2ZeE/QHEsWn7PqNYt8ARAY3kIgNGOk13g9FGyIY6JIl/xB/3TFRVoTv5as0l11weORrTekA==} dependencies: @@ -14813,12 +15376,16 @@ packages: resolution: {integrity: sha512-tlkBdypJuvK/s00n4EiQjwYVfuuZv6vt8BF3g1ooIQa2Gz9Vz80p8q3qsPLZ0V5ErGRy6i3Q4fWC9TDzR7GNRQ==} dev: false - /posthog-js@1.85.3: - resolution: {integrity: sha512-RHWLDamFU1k4SjolS/DWr8tYbuWQaJUJywxlKTUJp/ESIe9bJz9SkWuDNqgTV58fJnxdjCCtD89o+gMimfOSGw==} + /posthog-js@1.86.0: + resolution: {integrity: sha512-tDvgE8N0RqBNv8ALW1FVFFqc6nnJkn6bJzR62/6v4CyUdgCa+yk0foFdahOsi/9zOiHVsPs6L++zV3rs6DhHsQ==} dependencies: fflate: 0.4.8 dev: false + /potpack@2.0.0: + resolution: {integrity: sha512-Q+/tYsFU9r7xoOJ+y/ZTtdVQwTWfzjbiXBDMM/JKUux3+QPP02iUuIoeBQ+Ot6oEDlC+/PGjB/5A3K7KKb7hcw==} + dev: false + /prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} engines: {node: '>= 0.8.0'} @@ -15077,6 +15644,10 @@ packages: prosemirror-transform: 1.7.1 dev: false + /protocol-buffers-schema@3.6.0: + resolution: {integrity: sha512-TdDRD+/QNdrCGCE7v8340QyuXd4kIWIgapsE2+n/SaGiSSbomYl4TjHlvIoCWRpE7wFt02EpB35VVA2ImcBVqw==} + dev: false + 
/proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} @@ -15194,6 +15765,10 @@ packages: engines: {node: '>=10'} dev: false + /quickselect@2.0.0: + resolution: {integrity: sha512-RKJ22hX8mHe3Y6wH/N3wCM6BWtjaxIyyUIkpHOvfFnxdI4yD4tBXEBKSbriGujF6jnSVkJrffuo6vxACiSSxIw==} + dev: false + /ramda@0.29.0: resolution: {integrity: sha512-BBea6L67bYLtdbOqfp8f58fPMqEwx0doL+pAi8TZyp2YWz8R9G8z9x75CZI8W+ftqhFHCpEX2cRnUUXK130iKA==} dev: true @@ -15931,7 +16506,7 @@ packages: '@types/react': 16.14.34 react: 16.14.0 react-style-singleton: 2.2.1(@types/react@16.14.34)(react@16.14.0) - tslib: 2.4.1 + tslib: 2.6.2 dev: true /react-remove-scroll@2.5.5(@types/react@16.14.34)(react@16.14.0): @@ -16004,7 +16579,7 @@ packages: get-nonce: 1.0.1 invariant: 2.2.4 react: 16.14.0 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /react-syntax-highlighter@15.5.0(react@16.14.0): @@ -16190,7 +16765,7 @@ packages: ast-types: 0.15.2 esprima: 4.0.1 source-map: 0.6.1 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /recast@0.23.4: @@ -16225,6 +16800,18 @@ packages: '@babel/runtime': 7.22.10 dev: false + /reflect.getprototypeof@1.0.4: + resolution: {integrity: sha512-ECkTw8TmJwW60lOTR+ZkODISW6RQ8+2CL3COqtiJKLd6MmB45hN51HprHFziKLGkAuTGQhBb91V8cy+KHlaCjw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + get-intrinsic: 1.2.2 + globalthis: 1.0.3 + which-builtin-type: 1.1.3 + dev: true + /refractor@3.6.0: resolution: {integrity: sha512-MY9W41IOWxxk31o+YvFCNyNzdkc9M20NoZK5vq6jkv4I/uh2zkWcfudj0Q1fovjUQJrNewS9NMzeTtqPf+n5EA==} dependencies: @@ -16257,12 +16844,16 @@ packages: engines: {node: '>= 0.4'} dependencies: call-bind: 1.0.2 - define-properties: 1.1.4 + define-properties: 1.2.1 functions-have-names: 1.2.3 - /regexpp@3.2.0: - resolution: {integrity: sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==} - 
engines: {node: '>=8'} + /regexp.prototype.flags@1.5.1: + resolution: {integrity: sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + set-function-name: 2.0.1 dev: true /regexpu-core@5.3.2: @@ -16398,6 +16989,12 @@ packages: engines: {node: '>=8'} dev: true + /resolve-protobuf-schema@2.1.0: + resolution: {integrity: sha512-kI5ffTiZWmJaS/huM8wZfEMer1eRd7oJQhDuxeCLe3t7N7mX3z94CN0xPxBQxFYQTSNz9T0i+v6inKqSdK8xrQ==} + dependencies: + protocol-buffers-schema: 3.6.0 + dev: false + /resolve.exports@1.1.0: resolution: {integrity: sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==} engines: {node: '>=10'} @@ -16503,6 +17100,13 @@ packages: '@babel/runtime': 7.22.10 dev: false + /run-applescript@5.0.0: + resolution: {integrity: sha512-XcT5rBksx1QdIhlFOCtgZkB99ZEouFZ1E2Kc2LHqNW13U3/74YGdkQRmThTwxy4QIyookibDKYZOPqX//6BlAg==} + engines: {node: '>=12'} + dependencies: + execa: 5.1.1 + dev: true + /run-async@2.4.1: resolution: {integrity: sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==} engines: {node: '>=0.12.0'} @@ -16528,7 +17132,17 @@ packages: /rxjs@7.5.7: resolution: {integrity: sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==} dependencies: - tslib: 2.4.1 + tslib: 2.6.2 + dev: true + + /safe-array-concat@1.0.1: + resolution: {integrity: sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==} + engines: {node: '>=0.4'} + dependencies: + call-bind: 1.0.2 + get-intrinsic: 1.2.2 + has-symbols: 1.0.3 + isarray: 2.0.5 dev: true /safe-buffer@5.1.2: @@ -16541,8 +17155,8 @@ packages: /safe-regex-test@1.0.0: resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} dependencies: - call-bind: 1.0.2 - 
get-intrinsic: 1.1.3 + call-bind: 1.0.5 + get-intrinsic: 1.2.2 is-regex: 1.1.4 dev: true @@ -16708,11 +17322,39 @@ packages: resolution: {integrity: sha512-1jeBGaKNGdEq4FgIrORu/N570dwoPYio8lSoYLWmX7sQ//0JY08Xh9o5pBcgmHQ/MbsYp/aZnOe1s1lIsbLprQ==} dev: true + /set-function-length@1.1.1: + resolution: {integrity: sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==} + engines: {node: '>= 0.4'} + dependencies: + define-data-property: 1.1.1 + get-intrinsic: 1.2.2 + gopd: 1.0.1 + has-property-descriptors: 1.0.0 + + /set-function-name@2.0.1: + resolution: {integrity: sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==} + engines: {node: '>= 0.4'} + dependencies: + define-data-property: 1.1.1 + functions-have-names: 1.2.3 + has-property-descriptors: 1.0.0 + dev: true + /set-harmonic-interval@1.0.1: resolution: {integrity: sha512-AhICkFV84tBP1aWqPwLZqFvAwqEoVA9kxNMniGEUvzOlm4vLmOFLiTT3UZ6bziJTy4bOVpzWGTfSCbmaayGx8g==} engines: {node: '>=6.9'} dev: false + /set-value@2.0.1: + resolution: {integrity: sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==} + engines: {node: '>=0.10.0'} + dependencies: + extend-shallow: 2.0.1 + is-extendable: 0.1.1 + is-plain-object: 2.0.4 + split-string: 3.1.0 + dev: false + /setimmediate@1.0.5: resolution: {integrity: sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA==} dev: false @@ -16747,7 +17389,7 @@ packages: resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==} dependencies: call-bind: 1.0.2 - get-intrinsic: 1.1.3 + get-intrinsic: 1.2.2 object-inspect: 1.12.2 /signal-exit@3.0.7: @@ -16804,6 +17446,28 @@ packages: is-fullwidth-code-point: 3.0.0 dev: true + /sort-asc@0.2.0: + resolution: {integrity: sha512-umMGhjPeHAI6YjABoSTrFp2zaBtXBej1a0yKkuMUyjjqu6FJsTF+JYwCswWDg+zJfk/5npWUUbd33HH/WLzpaA==} + 
engines: {node: '>=0.10.0'} + dev: false + + /sort-desc@0.2.0: + resolution: {integrity: sha512-NqZqyvL4VPW+RAxxXnB8gvE1kyikh8+pR+T+CXLksVRN9eiQqkQlPwqWYU0mF9Jm7UnctShlxLyAt1CaBOTL1w==} + engines: {node: '>=0.10.0'} + dev: false + + /sort-object@3.0.3: + resolution: {integrity: sha512-nK7WOY8jik6zaG9CRwZTaD5O7ETWDLZYMM12pqY8htll+7dYeqGfEUPcUBHOpSJg2vJOrvFIY2Dl5cX2ih1hAQ==} + engines: {node: '>=0.10.0'} + dependencies: + bytewise: 1.1.0 + get-value: 2.0.6 + is-extendable: 0.1.1 + sort-asc: 0.2.0 + sort-desc: 0.2.0 + union-value: 1.0.1 + dev: false + /source-map-js@1.0.2: resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} engines: {node: '>=0.10.0'} @@ -16893,6 +17557,13 @@ packages: resolution: {integrity: sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA==} dev: true + /split-string@3.1.0: + resolution: {integrity: sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==} + engines: {node: '>=0.10.0'} + dependencies: + extend-shallow: 3.0.2 + dev: false + /sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} dev: true @@ -17081,20 +17752,45 @@ packages: side-channel: 1.0.4 dev: true + /string.prototype.trim@1.2.8: + resolution: {integrity: sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + dev: true + /string.prototype.trimend@1.0.5: resolution: {integrity: sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.4 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + dev: true + + /string.prototype.trimend@1.0.7: + resolution: {integrity: 
sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 dev: true /string.prototype.trimstart@1.0.5: resolution: {integrity: sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==} dependencies: - call-bind: 1.0.2 - define-properties: 1.1.4 - es-abstract: 1.20.4 + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 + dev: true + + /string.prototype.trimstart@1.0.7: + resolution: {integrity: sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==} + dependencies: + call-bind: 1.0.5 + define-properties: 1.2.1 + es-abstract: 1.22.3 dev: true /string_decoder@1.1.1: @@ -17149,6 +17845,11 @@ packages: engines: {node: '>=6'} dev: true + /strip-final-newline@3.0.0: + resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} + engines: {node: '>=12'} + dev: true + /strip-indent@3.0.0: resolution: {integrity: sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==} engines: {node: '>=8'} @@ -17207,6 +17908,12 @@ packages: ts-interface-checker: 0.1.13 dev: true + /supercluster@8.0.1: + resolution: {integrity: sha512-IiOea5kJ9iqzD2t7QJq/cREyLHTtSmUT6gQsweojg9WH2sYJqZK9SswTu6jrscO6D1G5v5vYZ9ru/eq85lXeZQ==} + dependencies: + kdbush: 4.0.2 + dev: false + /supports-color@5.5.0: resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==} engines: {node: '>=4'} @@ -17283,21 +17990,18 @@ packages: resolution: {integrity: sha512-AsS729u2RHUfEra9xJrE39peJcc2stq2+poBXX8bcM08Y6g9j/i/PUzwNQqkaJde7Ntg1TO7bSREbR5sdosQ+g==} dev: true + /synckit@0.8.5: + resolution: {integrity: sha512-L1dapNV6vu2s/4Sputv8xGsCdAVlb5nRDMFU/E27D44l5U6cw1g0dGd45uLc+OXjNMmF4ntiMdCimzcjFKQI8Q==} + engines: {node: ^14.18.0 || >=16.0.0} + 
dependencies: + '@pkgr/utils': 2.4.2 + tslib: 2.6.2 + dev: true + /tabbable@6.1.1: resolution: {integrity: sha512-4kl5w+nCB44EVRdO0g/UGoOp3vlwgycUVtkk/7DPyeLZUCuNFFKCFG6/t/DgHLrUPHjrZg6s5tNm+56Q2B0xyg==} dev: false - /table@6.8.1: - resolution: {integrity: sha512-Y4X9zqrCftUhMeH2EptSSERdVKt/nEdijTOacGD/97EKjhQ/Qs8RTlEGABSJNNN8lac9kheH+af7yAkEWlgneA==} - engines: {node: '>=10.0.0'} - dependencies: - ajv: 8.11.0 - lodash.truncate: 4.4.2 - slice-ansi: 4.0.0 - string-width: 4.2.3 - strip-ansi: 6.0.1 - dev: true - /tapable@2.2.1: resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==} engines: {node: '>=6'} @@ -17477,12 +18181,21 @@ packages: engines: {node: '>=6'} dev: false + /tinyqueue@2.0.3: + resolution: {integrity: sha512-ppJZNDuKGgxzkHihX8v9v9G5f+18gzaTfrukGrq6ueg0lmH4nqVnA2IPG0AEH3jKEk2GRJCUhDoqpoiw3PHLBA==} + dev: false + /tippy.js@6.3.7: resolution: {integrity: sha512-E1d3oP2emgJ9dRQZdf3Kkn0qJgI6ZLpyS5z6ZkY1DF3kaQaBsGZsndEpHwx+eC+tYM41HaSNvNtLx8tU57FzTQ==} dependencies: '@popperjs/core': 2.11.6 dev: false + /titleize@3.0.0: + resolution: {integrity: sha512-KxVu8EYHDPBdUYdKZdKtU2aj2XfEx9AfjXxE/Aj0vT06w2icA09Vus1rh6eSu1y01akYg6BjIK/hxyLJINoMLQ==} + engines: {node: '>=12'} + dev: true + /tmp@0.0.33: resolution: {integrity: sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==} engines: {node: '>=0.6.0'} @@ -17636,6 +18349,10 @@ packages: /tslib@2.4.1: resolution: {integrity: sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==} + /tslib@2.6.2: + resolution: {integrity: sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==} + dev: true + /tsutils@3.21.0(typescript@4.9.5): resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} engines: {node: '>= 6'} @@ -17719,6 +18436,44 @@ packages: media-typer: 0.3.0 
mime-types: 2.1.35 + /typed-array-buffer@1.0.0: + resolution: {integrity: sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + get-intrinsic: 1.2.2 + is-typed-array: 1.1.12 + dev: true + + /typed-array-byte-length@1.0.0: + resolution: {integrity: sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==} + engines: {node: '>= 0.4'} + dependencies: + call-bind: 1.0.5 + for-each: 0.3.3 + has-proto: 1.0.1 + is-typed-array: 1.1.12 + dev: true + + /typed-array-byte-offset@1.0.0: + resolution: {integrity: sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==} + engines: {node: '>= 0.4'} + dependencies: + available-typed-arrays: 1.0.5 + call-bind: 1.0.5 + for-each: 0.3.3 + has-proto: 1.0.1 + is-typed-array: 1.1.12 + dev: true + + /typed-array-length@1.0.4: + resolution: {integrity: sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==} + dependencies: + call-bind: 1.0.5 + for-each: 0.3.3 + is-typed-array: 1.1.12 + dev: true + /typedarray-to-buffer@3.1.5: resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==} dependencies: @@ -17735,6 +18490,16 @@ packages: hasBin: true dev: true + /typewise-core@1.2.0: + resolution: {integrity: sha512-2SCC/WLzj2SbUwzFOzqMCkz5amXLlxtJqDKTICqg30x+2DZxcfZN2MvQZmGfXWKNWaKK9pBPsvkcwv8bF/gxKg==} + dev: false + + /typewise@1.0.3: + resolution: {integrity: sha512-aXofE06xGhaQSPzt8hlTY+/YWQhm9P0jYUp1f2XtmW/3Bk0qzXcyFWAtPoo2uTGQj1ZwbDuSyuxicq+aDo8lCQ==} + dependencies: + typewise-core: 1.2.0 + dev: false + /ua-parser-js@0.7.32: resolution: {integrity: sha512-f9BESNVhzlhEFf2CHMSj40NWOjYPl1YKYbrvIr/hFTDEmLq7SRbWvm7FcdcpCYT95zrOhC7gZSxjdnnTpBcwVw==} dev: false @@ -17754,7 +18519,7 @@ packages: /unbox-primitive@1.0.2: resolution: {integrity: 
sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==} dependencies: - call-bind: 1.0.2 + call-bind: 1.0.5 has-bigints: 1.0.2 has-symbols: 1.0.3 which-boxed-primitive: 1.0.2 @@ -17795,6 +18560,16 @@ packages: vfile: 4.2.1 dev: false + /union-value@1.0.1: + resolution: {integrity: sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==} + engines: {node: '>=0.10.0'} + dependencies: + arr-union: 3.1.0 + get-value: 2.0.6 + is-extendable: 0.1.1 + set-value: 2.0.1 + dev: false + /uniq@1.0.1: resolution: {integrity: sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA==} dev: true @@ -17914,7 +18689,7 @@ packages: dependencies: '@types/react': 16.14.34 react: 16.14.0 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /use-composed-ref@1.3.0(react@16.14.0): @@ -17996,7 +18771,7 @@ packages: '@types/react': 16.14.34 detect-node-es: 1.1.0 react: 16.14.0 - tslib: 2.4.1 + tslib: 2.6.2 dev: true /use-sync-external-store@1.2.0(react@16.14.0): @@ -18014,8 +18789,8 @@ packages: /util.promisify@1.0.1: resolution: {integrity: sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==} dependencies: - define-properties: 1.1.4 - es-abstract: 1.20.4 + define-properties: 1.2.1 + es-abstract: 1.22.3 has-symbols: 1.0.3 object.getownpropertydescriptors: 2.1.4 dev: true @@ -18052,10 +18827,6 @@ packages: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} dev: true - /v8-compile-cache@2.3.0: - resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==} - dev: true - /v8-to-istanbul@9.0.1: resolution: {integrity: sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==} engines: {node: '>=10.12.0'} @@ -18104,6 +18875,14 @@ packages: vfile-message: 2.0.4 dev: false + 
/vt-pbf@3.1.3: + resolution: {integrity: sha512-2LzDFzt0mZKZ9IpVF2r69G9bXaP2Q2sArJCmcCgvfTdCCZzSyz4aCLoQyUilu37Ll56tCblIZrXFIjNUpGIlmA==} + dependencies: + '@mapbox/point-geometry': 0.1.0 + '@mapbox/vector-tile': 1.3.1 + pbf: 3.2.1 + dev: false + /w3c-keyname@2.2.6: resolution: {integrity: sha512-f+fciywl1SJEniZHD6H+kUO8gOnwIr7f4ijKA6+ZvJFjeGi1r4PDLl53Ayud9O/rk64RqgoQine0feoeOU0kXg==} dev: false @@ -18342,6 +19121,24 @@ packages: is-string: 1.0.7 is-symbol: 1.0.4 + /which-builtin-type@1.1.3: + resolution: {integrity: sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==} + engines: {node: '>= 0.4'} + dependencies: + function.prototype.name: 1.1.6 + has-tostringtag: 1.0.0 + is-async-function: 2.0.0 + is-date-object: 1.0.5 + is-finalizationregistry: 1.0.2 + is-generator-function: 1.0.10 + is-regex: 1.1.4 + is-weakref: 1.0.2 + isarray: 2.0.5 + which-boxed-primitive: 1.0.2 + which-collection: 1.0.1 + which-typed-array: 1.1.13 + dev: true + /which-collection@1.0.1: resolution: {integrity: sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==} dependencies: @@ -18354,23 +19151,32 @@ packages: resolution: {integrity: sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==} dev: true + /which-typed-array@1.1.13: + resolution: {integrity: sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==} + engines: {node: '>= 0.4'} + dependencies: + available-typed-arrays: 1.0.5 + call-bind: 1.0.5 + for-each: 0.3.3 + gopd: 1.0.1 + has-tostringtag: 1.0.0 + /which-typed-array@1.1.9: resolution: {integrity: sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==} engines: {node: '>= 0.4'} dependencies: available-typed-arrays: 1.0.5 - call-bind: 1.0.2 + call-bind: 1.0.5 for-each: 0.3.3 gopd: 1.0.1 has-tostringtag: 1.0.0 - is-typed-array: 1.1.10 + is-typed-array: 1.1.12 /which@1.3.1: 
resolution: {integrity: sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==} hasBin: true dependencies: isexe: 2.0.0 - dev: true /which@2.0.2: resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} @@ -18388,11 +19194,6 @@ packages: resolution: {integrity: sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==} dev: true - /word-wrap@1.2.4: - resolution: {integrity: sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==} - engines: {node: '>=0.10.0'} - dev: true - /wordwrap@1.0.0: resolution: {integrity: sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} dev: true diff --git a/posthog/admin.py b/posthog/admin.py index b35bae3944689..75d6cd710c50f 100644 --- a/posthog/admin.py +++ b/posthog/admin.py @@ -54,13 +54,22 @@ class DashboardAdmin(admin.ModelAdmin): list_display_links = ("id", "name") list_select_related = ("team", "team__organization") search_fields = ("id", "name", "team__name", "team__organization__name") - readonly_fields = ("last_accessed_at", "deprecated_tags", "deprecated_tags_v2", "share_token") + readonly_fields = ( + "last_accessed_at", + "deprecated_tags", + "deprecated_tags_v2", + "share_token", + ) autocomplete_fields = ("team", "created_by") ordering = ("-created_at", "creation_mode") inlines = (DashboardTileInline,) def team_link(self, dashboard: Dashboard): - return format_html('{}', dashboard.team.pk, dashboard.team.name) + return format_html( + '{}', + dashboard.team.pk, + dashboard.team.name, + ) def organization_link(self, dashboard: Dashboard): return format_html( @@ -98,7 +107,11 @@ def effective_name(self, insight: Insight): return insight.name or format_html("{}", insight.derived_name) def team_link(self, insight: Insight): - return format_html('{}', insight.team.pk, insight.team.name) + return 
format_html( + '{}', + insight.team.pk, + insight.team.name, + ) def organization_link(self, insight: Insight): return format_html( @@ -137,10 +150,23 @@ class GroupTypeMappingInline(admin.TabularInline): @admin.register(Team) class TeamAdmin(admin.ModelAdmin): - list_display = ("id", "name", "organization_link", "organization_id", "created_at", "updated_at") + list_display = ( + "id", + "name", + "organization_link", + "organization_id", + "created_at", + "updated_at", + ) list_display_links = ("id", "name") list_select_related = ("organization",) - search_fields = ("id", "name", "organization__id", "organization__name", "api_token") + search_fields = ( + "id", + "name", + "organization__id", + "organization__name", + "api_token", + ) readonly_fields = ["organization", "primary_dashboard", "test_account_filters"] inlines = [GroupTypeMappingInline, ActionInline] fieldsets = [ @@ -169,7 +195,12 @@ class TeamAdmin(admin.ModelAdmin): "Onboarding", { "classes": ["collapse"], - "fields": ["is_demo", "completed_snippet_onboarding", "ingested_event", "signup_token"], + "fields": [ + "is_demo", + "completed_snippet_onboarding", + "ingested_event", + "signup_token", + ], }, ), ( @@ -198,14 +229,20 @@ class TeamAdmin(admin.ModelAdmin): "Filters", { "classes": ["collapse"], - "fields": ["test_account_filters", "test_account_filters_default_checked", "path_cleaning_filters"], + "fields": [ + "test_account_filters", + "test_account_filters_default_checked", + "path_cleaning_filters", + ], }, ), ] def organization_link(self, team: Team): return format_html( - '{}', team.organization.pk, team.organization.name + '{}', + team.organization.pk, + team.organization.name, ) @@ -303,7 +340,18 @@ class UserAdmin(DjangoUserAdmin): inlines = [OrganizationMemberInline] fieldsets = ( - (None, {"fields": ("email", "password", "current_organization", "is_email_verified", "pending_email")}), + ( + None, + { + "fields": ( + "email", + "password", + "current_organization", + "is_email_verified", 
+ "pending_email", + ) + }, + ), (_("Personal info"), {"fields": ("first_name", "last_name")}), (_("Permissions"), {"fields": ("is_active", "is_staff")}), (_("Important dates"), {"fields": ("last_login", "date_joined")}), @@ -330,14 +378,20 @@ def current_team_link(self, user: User): if not user.team: return "–" - return format_html('{}', user.team.pk, user.team.name) + return format_html( + '{}', + user.team.pk, + user.team.name, + ) def current_organization_link(self, user: User): if not user.organization: return "–" return format_html( - '{}', user.organization.pk, user.organization.name + '{}', + user.organization.pk, + user.organization.name, ) @@ -372,7 +426,12 @@ class OrganizationTeamInline(admin.TabularInline): readonly_fields = ("id", "displayed_name", "created_at", "updated_at") def displayed_name(self, team: Team): - return format_html('{}. {}', team.pk, team.pk, team.name) + return format_html( + '{}. {}', + team.pk, + team.pk, + team.name, + ) @admin.register(Organization) @@ -447,7 +506,15 @@ class InstanceSettingAdmin(admin.ModelAdmin): @admin.register(Person) class PersonAdmin(admin.ModelAdmin): - list_display = ("id", "distinct_ids", "created_at", "team", "is_user", "is_identified", "version") + list_display = ( + "id", + "distinct_ids", + "created_at", + "team", + "is_user", + "is_identified", + "version", + ) list_filter = ("created_at", "is_identified", "version") search_fields = ("id",) diff --git a/posthog/api/__init__.py b/posthog/api/__init__.py index aa82ea7485d1a..8003f2139ce2c 100644 --- a/posthog/api/__init__.py +++ b/posthog/api/__init__.py @@ -157,7 +157,10 @@ def api_not_found(request): ["team_id"], ) projects_router.register( - r"warehouse_view_links", view_link.ViewLinkViewSet, "project_warehouse_view_links", ["team_id"] + r"warehouse_view_links", + view_link.ViewLinkViewSet, + "project_warehouse_view_links", + ["team_id"], ) projects_router.register(r"warehouse_view_link", view_link.ViewLinkViewSet, "warehouse_api", ["team_id"]) 
diff --git a/posthog/api/action.py b/posthog/api/action.py index f7d6550742324..0774fae8087fe 100644 --- a/posthog/api/action.py +++ b/posthog/api/action.py @@ -13,14 +13,21 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.api.shared import UserBasicSerializer from posthog.api.utils import get_target_entity -from posthog.auth import JwtAuthentication, PersonalAPIKeyAuthentication, TemporaryTokenAuthentication +from posthog.auth import ( + JwtAuthentication, + PersonalAPIKeyAuthentication, + TemporaryTokenAuthentication, +) from posthog.client import sync_execute from posthog.constants import LIMIT, TREND_FILTER_TYPE_EVENTS from posthog.event_usage import report_user_action from posthog.hogql.hogql import HogQLContext from posthog.models import Action, ActionStep, Filter, Person from posthog.models.action.util import format_action_filter -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.queries.trends.trends_actors import TrendsActors from .forbid_destroy_model import ForbidDestroyModel @@ -124,12 +131,15 @@ def create(self, validated_data: Any) -> Any: **{key: value for key, value in step.items() if key not in ("isNew", "selection")}, ) - report_user_action(validated_data["created_by"], "action created", instance.get_analytics_metadata()) + report_user_action( + validated_data["created_by"], + "action created", + instance.get_analytics_metadata(), + ) return instance def update(self, instance: Any, validated_data: Dict[str, Any]) -> Any: - steps = validated_data.pop("steps", None) # If there's no steps property at all we just ignore it # If there is a step property but it's an empty array [], we'll delete all the steps @@ -164,7 +174,12 @@ def update(self, instance: Any, validated_data: Dict[str, Any]) -> Any: return instance -class ActionViewSet(TaggedItemViewSetMixin, 
StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): +class ActionViewSet( + TaggedItemViewSetMixin, + StructuredViewSetMixin, + ForbidDestroyModel, + viewsets.ModelViewSet, +): renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,) queryset = Action.objects.all() serializer_class = ActionSerializer @@ -175,7 +190,11 @@ class ActionViewSet(TaggedItemViewSetMixin, StructuredViewSetMixin, ForbidDestro authentication.SessionAuthentication, authentication.BasicAuthentication, ] - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] ordering = ["-last_calculated_at", "name"] def get_queryset(self): @@ -189,7 +208,9 @@ def get_queryset(self): def list(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: actions = self.get_queryset() - actions_list: List[Dict[Any, Any]] = self.serializer_class(actions, many=True, context={"request": request}).data # type: ignore + actions_list: List[Dict[Any, Any]] = self.serializer_class( + actions, many=True, context={"request": request} + ).data # type: ignore return Response({"results": actions_list}) # NOTE: Deprecated in favour of `persons/trends` endpoint diff --git a/posthog/api/activity_log.py b/posthog/api/activity_log.py index de9e7b471b6bc..31df6adefb586 100644 --- a/posthog/api/activity_log.py +++ b/posthog/api/activity_log.py @@ -63,10 +63,19 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp my_notebooks = list(Notebook.objects.filter(created_by=user, team_id=self.team.pk).values_list("id", flat=True)) # then things they edited - interesting_changes = ["updated", "exported", "sharing enabled", "sharing disabled", "deleted"] + interesting_changes = [ + "updated", + "exported", + "sharing enabled", + "sharing disabled", + "deleted", + ] 
my_changed_insights = list( ActivityLog.objects.filter( - team_id=self.team.id, activity__in=interesting_changes, user_id=user.pk, scope="Insight" + team_id=self.team.id, + activity__in=interesting_changes, + user_id=user.pk, + scope="Insight", ) .exclude(item_id__in=my_insights) .values_list("item_id", flat=True) @@ -74,7 +83,10 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp my_changed_notebooks = list( ActivityLog.objects.filter( - team_id=self.team.id, activity__in=interesting_changes, user_id=user.pk, scope="Notebook" + team_id=self.team.id, + activity__in=interesting_changes, + user_id=user.pk, + scope="Notebook", ) .exclude(item_id__in=my_notebooks) .values_list("item_id", flat=True) @@ -82,7 +94,10 @@ def important_changes(self, request: Request, *args: Any, **kwargs: Any) -> Resp my_changed_feature_flags = list( ActivityLog.objects.filter( - team_id=self.team.id, activity__in=interesting_changes, user_id=user.pk, scope="FeatureFlag" + team_id=self.team.id, + activity__in=interesting_changes, + user_id=user.pk, + scope="FeatureFlag", ) .exclude(item_id__in=my_feature_flags) .values_list("item_id", flat=True) diff --git a/posthog/api/annotation.py b/posthog/api/annotation.py index 7fda011e48cd4..7e2e5d7b1f62d 100644 --- a/posthog/api/annotation.py +++ b/posthog/api/annotation.py @@ -11,7 +11,10 @@ from posthog.api.shared import UserBasicSerializer from posthog.event_usage import report_user_action from posthog.models import Annotation -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) class AnnotationSerializer(serializers.ModelSerializer): @@ -50,7 +53,10 @@ def create(self, validated_data: Dict[str, Any], *args: Any, **kwargs: Any) -> A request = self.context["request"] team = self.context["get_team"]() annotation = Annotation.objects.create( - 
organization_id=team.organization_id, team_id=team.id, created_by=request.user, **validated_data + organization_id=team.organization_id, + team_id=team.id, + created_by=request.user, + **validated_data, ) return annotation @@ -66,7 +72,11 @@ class AnnotationsViewSet(StructuredViewSetMixin, ForbidDestroyModel, viewsets.Mo queryset = Annotation.objects.select_related("dashboard_item") serializer_class = AnnotationSerializer - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] filter_backends = [filters.SearchFilter] pagination_class = AnnotationsLimitOffsetPagination search_fields = ["content"] diff --git a/posthog/api/app_metrics.py b/posthog/api/app_metrics.py index 601fa1cec2f80..329522c3dc635 100644 --- a/posthog/api/app_metrics.py +++ b/posthog/api/app_metrics.py @@ -5,9 +5,19 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.models.plugin import PluginConfig -from posthog.queries.app_metrics.app_metrics import AppMetricsErrorDetailsQuery, AppMetricsErrorsQuery, AppMetricsQuery -from posthog.queries.app_metrics.historical_exports import historical_export_metrics, historical_exports_activity -from posthog.queries.app_metrics.serializers import AppMetricsErrorsRequestSerializer, AppMetricsRequestSerializer +from posthog.queries.app_metrics.app_metrics import ( + AppMetricsErrorDetailsQuery, + AppMetricsErrorsQuery, + AppMetricsQuery, +) +from posthog.queries.app_metrics.historical_exports import ( + historical_export_metrics, + historical_exports_activity, +) +from posthog.queries.app_metrics.serializers import ( + AppMetricsErrorsRequestSerializer, + AppMetricsRequestSerializer, +) class AppMetricsViewSet(StructuredViewSetMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): @@ -35,7 +45,10 @@ def error_details(self, request: request.Request, *args: Any, **kwargs: 
Any) -> class HistoricalExportsAppMetricsViewSet( - StructuredViewSetMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.ViewSet + StructuredViewSetMixin, + mixins.ListModelMixin, + mixins.RetrieveModelMixin, + viewsets.ViewSet, ): def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response.Response: return response.Response( diff --git a/posthog/api/async_migration.py b/posthog/api/async_migration.py index 12141f2b54c41..5892151ca98a3 100644 --- a/posthog/api/async_migration.py +++ b/posthog/api/async_migration.py @@ -4,9 +4,16 @@ from semantic_version.base import Version from posthog.api.routing import StructuredViewSetMixin -from posthog.async_migrations.runner import MAX_CONCURRENT_ASYNC_MIGRATIONS, is_posthog_version_compatible +from posthog.async_migrations.runner import ( + MAX_CONCURRENT_ASYNC_MIGRATIONS, + is_posthog_version_compatible, +) from posthog.async_migrations.setup import get_async_migration_definition -from posthog.async_migrations.utils import force_stop_migration, rollback_migration, trigger_migration +from posthog.async_migrations.utils import ( + force_stop_migration, + rollback_migration, + trigger_migration, +) from posthog.constants import FROZEN_POSTHOG_VERSION from posthog.models.async_migration import ( AsyncMigration, @@ -109,7 +116,8 @@ def trigger(self, request, **kwargs): migration_instance = self.get_object() if not is_posthog_version_compatible( - migration_instance.posthog_min_version, migration_instance.posthog_max_version + migration_instance.posthog_min_version, + migration_instance.posthog_max_version, ): return response.Response( { @@ -131,7 +139,11 @@ def resume(self, request, **kwargs): migration_instance = self.get_object() if migration_instance.status != MigrationStatus.Errored: return response.Response( - {"success": False, "error": "Can't resume a migration that isn't in errored state"}, status=400 + { + "success": False, + "error": "Can't resume a migration that isn't in errored 
state", + }, + status=400, ) migration_instance.status = MigrationStatus.Running @@ -143,9 +155,16 @@ def resume(self, request, **kwargs): def _force_stop(self, rollback: bool): migration_instance = self.get_object() - if migration_instance.status not in [MigrationStatus.Running, MigrationStatus.Starting]: + if migration_instance.status not in [ + MigrationStatus.Running, + MigrationStatus.Starting, + ]: return response.Response( - {"success": False, "error": "Can't stop a migration that isn't running."}, status=400 + { + "success": False, + "error": "Can't stop a migration that isn't running.", + }, + status=400, ) force_stop_migration(migration_instance, rollback=rollback) return response.Response({"success": True}, status=200) @@ -165,7 +184,11 @@ def rollback(self, request, **kwargs): migration_instance = self.get_object() if migration_instance.status != MigrationStatus.Errored: return response.Response( - {"success": False, "error": "Can't rollback a migration that isn't in errored state."}, status=400 + { + "success": False, + "error": "Can't rollback a migration that isn't in errored state.", + }, + status=400, ) rollback_migration(migration_instance) @@ -176,7 +199,10 @@ def force_rollback(self, request, **kwargs): migration_instance = self.get_object() if migration_instance.status != MigrationStatus.CompletedSuccessfully: return response.Response( - {"success": False, "error": "Can't force rollback a migration that did not complete successfully."}, + { + "success": False, + "error": "Can't force rollback a migration that did not complete successfully.", + }, status=400, ) diff --git a/posthog/api/authentication.py b/posthog/api/authentication.py index 2cbc2f2ceed95..7da0859f0d0c6 100644 --- a/posthog/api/authentication.py +++ b/posthog/api/authentication.py @@ -7,7 +7,9 @@ from django.contrib.auth import authenticate, login from django.contrib.auth import views as auth_views from django.contrib.auth.password_validation import validate_password -from 
django.contrib.auth.tokens import PasswordResetTokenGenerator as DefaultPasswordResetTokenGenerator +from django.contrib.auth.tokens import ( + PasswordResetTokenGenerator as DefaultPasswordResetTokenGenerator, +) from django.core.exceptions import ValidationError from django.db import transaction from django.http import HttpRequest, HttpResponse, JsonResponse @@ -23,7 +25,10 @@ from social_django.views import auth from two_factor.utils import default_device from two_factor.views.core import REMEMBER_COOKIE_PREFIX -from two_factor.views.utils import get_remember_device_cookie, validate_remember_device_cookie +from two_factor.views.utils import ( + get_remember_device_cookie, + validate_remember_device_cookie, +) from posthog.api.email_verification import EmailVerifier from posthog.email import is_email_available @@ -110,12 +115,18 @@ def create(self, validated_data: Dict[str, str]) -> Any: sso_enforcement = OrganizationDomain.objects.get_sso_enforcement_for_email_address(validated_data["email"]) if sso_enforcement: raise serializers.ValidationError( - f"You can only login with SSO for this account ({sso_enforcement}).", code="sso_enforced" + f"You can only login with SSO for this account ({sso_enforcement}).", + code="sso_enforced", ) request = self.context["request"] user = cast( - Optional[User], authenticate(request, email=validated_data["email"], password=validated_data["password"]) + Optional[User], + authenticate( + request, + email=validated_data["email"], + password=validated_data["password"], + ), ) if not user: @@ -211,7 +222,8 @@ def create(self, request: Request, *args: Any, **kwargs: Any) -> Any: ) if int(time.time()) > expiration_time: raise serializers.ValidationError( - detail="Login attempt has expired. Re-enter username/password.", code="2fa_expired" + detail="Login attempt has expired. 
Re-enter username/password.", + code="2fa_expired", ) with transaction.atomic(): @@ -242,7 +254,8 @@ def create(self, validated_data): # Check SSO enforcement (which happens at the domain level) if OrganizationDomain.objects.get_sso_enforcement_for_email_address(email): raise serializers.ValidationError( - "Password reset is disabled because SSO login is enforced for this domain.", code="sso_enforced" + "Password reset is disabled because SSO login is enforced for this domain.", + code="sso_enforced", ) if not is_email_available(): @@ -278,12 +291,14 @@ def create(self, validated_data): user = User.objects.filter(is_active=True).get(uuid=self.context["view"].kwargs["user_uuid"]) except User.DoesNotExist: raise serializers.ValidationError( - {"token": ["This reset token is invalid or has expired."]}, code="invalid_token" + {"token": ["This reset token is invalid or has expired."]}, + code="invalid_token", ) if not password_reset_token_generator.check_token(user, validated_data["token"]): raise serializers.ValidationError( - {"token": ["This reset token is invalid or has expired."]}, code="invalid_token" + {"token": ["This reset token is invalid or has expired."]}, + code="invalid_token", ) password = validated_data["password"] try: @@ -295,7 +310,11 @@ def create(self, validated_data): user.requested_password_reset_at = None user.save() - login(self.context["request"], user, backend="django.contrib.auth.backends.ModelBackend") + login( + self.context["request"], + user, + backend="django.contrib.auth.backends.ModelBackend", + ) report_user_password_reset(user) return True @@ -332,7 +351,8 @@ def get_object(self): if not user or not password_reset_token_generator.check_token(user, token): raise serializers.ValidationError( - {"token": ["This reset token is invalid or has expired."]}, code="invalid_token" + {"token": ["This reset token is invalid or has expired."]}, + code="invalid_token", ) return {"success": True, "token": token} diff --git a/posthog/api/capture.py 
b/posthog/api/capture.py index ba07e55f34900..98c52d1781380 100644 --- a/posthog/api/capture.py +++ b/posthog/api/capture.py @@ -58,7 +58,10 @@ # events that are ingested via a separate path than analytics events. They have # fewer restrictions on e.g. the order they need to be processed in. SESSION_RECORDING_DEDICATED_KAFKA_EVENTS = ("$snapshot_items",) -SESSION_RECORDING_EVENT_NAMES = ("$snapshot", "$performance_event") + SESSION_RECORDING_DEDICATED_KAFKA_EVENTS +SESSION_RECORDING_EVENT_NAMES = ( + "$snapshot", + "$performance_event", +) + SESSION_RECORDING_DEDICATED_KAFKA_EVENTS EVENTS_RECEIVED_COUNTER = Counter( "capture_events_received_total", @@ -205,7 +208,9 @@ def _get_sent_at(data, request) -> Tuple[Optional[datetime], Any]: cors_response( request, generate_exception_response( - "capture", f"Malformed request data, invalid sent at: {error}", code="invalid_payload" + "capture", + f"Malformed request data, invalid sent at: {error}", + code="invalid_payload", ), ), ) @@ -321,7 +326,12 @@ def get_event(request): invalid_token_reason = _check_token_shape(token) except Exception as e: invalid_token_reason = "exception" - logger.warning("capture_token_shape_exception", token=token, reason="exception", exception=e) + logger.warning( + "capture_token_shape_exception", + token=token, + reason="exception", + exception=e, + ) if invalid_token_reason: TOKEN_SHAPE_INVALID_COUNTER.labels(reason=invalid_token_reason).inc() @@ -372,7 +382,8 @@ def get_event(request): except ValueError as e: return cors_response( - request, generate_exception_response("capture", f"Invalid payload: {e}", code="invalid_payload") + request, + generate_exception_response("capture", f"Invalid payload: {e}", code="invalid_payload"), ) # We don't use the site_url anymore, but for safe roll-outs keeping it here for now @@ -383,7 +394,8 @@ def get_event(request): processed_events = list(preprocess_events(events)) except ValueError as e: return cors_response( - request, 
generate_exception_response("capture", f"Invalid payload: {e}", code="invalid_payload") + request, + generate_exception_response("capture", f"Invalid payload: {e}", code="invalid_payload"), ) futures: List[FutureRecordMetadata] = [] @@ -392,7 +404,18 @@ def get_event(request): span.set_tag("event.count", len(processed_events)) for event, event_uuid, distinct_id in processed_events: try: - futures.append(capture_internal(event, distinct_id, ip, site_url, now, sent_at, event_uuid, token)) + futures.append( + capture_internal( + event, + distinct_id, + ip, + site_url, + now, + sent_at, + event_uuid, + token, + ) + ) except Exception as exc: capture_exception(exc, {"data": data}) statsd.incr("posthog_cloud_raw_endpoint_failure", tags={"endpoint": "capture"}) @@ -453,7 +476,18 @@ def get_event(request): if alternative_replay_events: processed_events = list(preprocess_events(alternative_replay_events)) for event, event_uuid, distinct_id in processed_events: - futures.append(capture_internal(event, distinct_id, ip, site_url, now, sent_at, event_uuid, token)) + futures.append( + capture_internal( + event, + distinct_id, + ip, + site_url, + now, + sent_at, + event_uuid, + token, + ) + ) start_time = time.monotonic() for future in futures: @@ -572,7 +606,10 @@ def is_randomly_partitioned(candidate_partition_key: str) -> bool: return True PARTITION_KEY_CAPACITY_EXCEEDED_COUNTER.labels(partition_key=candidate_partition_key.split(":")[0]).inc() - statsd.incr("partition_key_capacity_exceeded", tags={"partition_key": candidate_partition_key}) + statsd.incr( + "partition_key_capacity_exceeded", + tags={"partition_key": candidate_partition_key}, + ) logger.warning( "Partition key %s overridden as bucket capacity of %s tokens exceeded", candidate_partition_key, diff --git a/posthog/api/cohort.py b/posthog/api/cohort.py index d3784b3b6eb4e..c525be8d263be 100644 --- a/posthog/api/cohort.py +++ b/posthog/api/cohort.py @@ -46,9 +46,19 @@ from posthog.models.filters.path_filter import 
PathFilter from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.filters.lifecycle_filter import LifecycleFilter -from posthog.models.person.sql import INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID, PERSON_STATIC_COHORT_TABLE -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission -from posthog.queries.actor_base_query import ActorBaseQuery, get_people, serialize_people +from posthog.models.person.sql import ( + INSERT_COHORT_ALL_PEOPLE_THROUGH_PERSON_ID, + PERSON_STATIC_COHORT_TABLE, +) +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) +from posthog.queries.actor_base_query import ( + ActorBaseQuery, + get_people, + serialize_people, +) from posthog.queries.paths import PathsActors from posthog.queries.person_query import PersonQuery from posthog.queries.stickiness import StickinessActors @@ -221,7 +231,10 @@ def update(self, cohort: Cohort, validated_data: Dict, *args: Any, **kwargs: Any report_user_action( request.user, "cohort updated", - {**cohort.get_analytics_metadata(), "updated_by_creator": request.user == cohort.created_by}, + { + **cohort.get_analytics_metadata(), + "updated_by_creator": request.user == cohort.created_by, + }, ) return cohort @@ -237,7 +250,11 @@ def to_representation(self, instance): class CohortViewSet(StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): queryset = Cohort.objects.all() serializer_class = CohortSerializer - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] def get_queryset(self) -> QuerySet: queryset = super().get_queryset() @@ -262,7 +279,11 @@ def duplicate_as_static_cohort(self, request: Request, **kwargs) -> Response: "name": f"{cohort.name} (static copy)", "is_static": True, }, - 
context={"request": request, "from_cohort_id": cohort.pk, "team_id": team.pk}, + context={ + "request": request, + "from_cohort_id": cohort.pk, + "team_id": team.pk, + }, ) cohort_serializer.is_valid(raise_exception=True) @@ -273,7 +294,10 @@ def duplicate_as_static_cohort(self, request: Request, **kwargs) -> Response: @action( methods=["GET"], detail=True, - renderer_classes=[*api_settings.DEFAULT_RENDERER_CLASSES, csvrenderers.PaginatedCSVRenderer], + renderer_classes=[ + *api_settings.DEFAULT_RENDERER_CLASSES, + csvrenderers.PaginatedCSVRenderer, + ], ) def persons(self, request: Request, **kwargs) -> Response: cohort: Cohort = self.get_object() @@ -313,7 +337,12 @@ def persons(self, request: Request, **kwargs) -> Response: ) persons = [] for p in serialized_actors: - person = Person(uuid=p[0], created_at=p[1], is_identified=p[2], properties=json.loads(p[3])) + person = Person( + uuid=p[0], + created_at=p[1], + is_identified=p[2], + properties=json.loads(p[3]), + ) person._distinct_ids = p[4] persons.append(person) @@ -334,8 +363,21 @@ def persons(self, request: Request, **kwargs) -> Response: else None ) if is_csv_request: - KEYS_ORDER = ["id", "email", "name", "created_at", "properties", "distinct_ids"] - DELETE_KEYS = ["value_at_data_point", "uuid", "type", "is_identified", "matched_recordings"] + KEYS_ORDER = [ + "id", + "email", + "name", + "created_at", + "properties", + "distinct_ids", + ] + DELETE_KEYS = [ + "value_at_data_point", + "uuid", + "type", + "is_identified", + "matched_recordings", + ] for actor in serialized_actors: if actor["properties"].get("email"): actor["email"] = actor["properties"]["email"] @@ -344,7 +386,8 @@ def persons(self, request: Request, **kwargs) -> Response: { k: v for k, v in sorted( - actor.items(), key=lambda item: KEYS_ORDER.index(item[0]) if item[0] in KEYS_ORDER else 999999 + actor.items(), + key=lambda item: KEYS_ORDER.index(item[0]) if item[0] in KEYS_ORDER else 999999, ) if k not in DELETE_KEYS } @@ -421,7 +464,11 @@ 
def insert_cohort_actors_into_ch(cohort: Cohort, filter_data: Dict): WHERE team_id = %(team_id)s AND cohort_id = %(from_cohort_id)s AND version = %(version)s ORDER BY person_id """ - params = {"team_id": cohort.team.pk, "from_cohort_id": existing_cohort.pk, "version": existing_cohort.version} + params = { + "team_id": cohort.team.pk, + "from_cohort_id": existing_cohort.pk, + "version": existing_cohort.version, + } context = Filter(data=filter_data, team=cohort.team).hogql_context else: insight_type = filter_data.get("insight") diff --git a/posthog/api/dashboards/dashboard.py b/posthog/api/dashboards/dashboard.py index 63c9a8d05bcc9..be0f059dbe8dd 100644 --- a/posthog/api/dashboards/dashboard.py +++ b/posthog/api/dashboards/dashboard.py @@ -14,7 +14,9 @@ from rest_framework.response import Response from rest_framework.utils.serializer_helpers import ReturnDict -from posthog.api.dashboards.dashboard_template_json_schema_parser import DashboardTemplateCreationJSONSchemaParser +from posthog.api.dashboards.dashboard_template_json_schema_parser import ( + DashboardTemplateCreationJSONSchemaParser, +) from posthog.api.forbid_destroy_model import ForbidDestroyModel from posthog.api.insight import InsightSerializer, InsightViewSet from posthog.api.routing import StructuredViewSetMixin @@ -29,7 +31,10 @@ from posthog.models.tagged_item import TaggedItem from posthog.models.team.team import check_is_feature_available_for_team from posthog.models.user import User -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.user_permissions import UserPermissionsSerializerMixin logger = structlog.get_logger(__name__) @@ -61,7 +66,14 @@ class DashboardTileSerializer(serializers.ModelSerializer): class Meta: model = DashboardTile - exclude = ["dashboard", "deleted", "filters_hash", "last_refresh", "refreshing", 
"refresh_attempt"] + exclude = [ + "dashboard", + "deleted", + "filters_hash", + "last_refresh", + "refreshing", + "refresh_attempt", + ] read_only_fields = ["id", "insight"] depth = 1 @@ -76,7 +88,11 @@ def to_representation(self, instance: DashboardTile): return representation -class DashboardBasicSerializer(TaggedItemSerializerMixin, serializers.ModelSerializer, UserPermissionsSerializerMixin): +class DashboardBasicSerializer( + TaggedItemSerializerMixin, + serializers.ModelSerializer, + UserPermissionsSerializerMixin, +): created_by = UserBasicSerializer(read_only=True) effective_privilege_level = serializers.SerializerMethodField() effective_restriction_level = serializers.SerializerMethodField() @@ -252,7 +268,10 @@ def _deep_duplicate_tiles(self, dashboard: Dashboard, existing_tile: DashboardTi text_serializer.save() text = cast(Text, text_serializer.instance) DashboardTile.objects.create( - dashboard=dashboard, text=text, layouts=existing_tile.layouts, color=existing_tile.color + dashboard=dashboard, + text=text, + layouts=existing_tile.layouts, + color=existing_tile.color, ) def update(self, instance: Dashboard, validated_data: Dict, *args: Any, **kwargs: Any) -> Dashboard: @@ -310,13 +329,15 @@ def _update_tiles(instance: Dashboard, tile_data: Dict, user: User) -> None: }, ) DashboardTile.objects.update_or_create( - id=tile_data.get("id", None), defaults={**tile_data, "text": text, "dashboard": instance} + id=tile_data.get("id", None), + defaults={**tile_data, "text": text, "dashboard": instance}, ) elif "deleted" in tile_data or "color" in tile_data or "layouts" in tile_data: tile_data.pop("insight", None) # don't ever update insight tiles here DashboardTile.objects.update_or_create( - id=tile_data.get("id", None), defaults={**tile_data, "dashboard": instance} + id=tile_data.get("id", None), + defaults={**tile_data, "dashboard": instance}, ) @staticmethod @@ -352,7 +373,9 @@ def get_tiles(self, dashboard: Dashboard) -> Optional[List[ReturnDict]]: tiles = 
DashboardTile.dashboard_queryset(dashboard.tiles).prefetch_related( Prefetch( - "insight__tagged_items", queryset=TaggedItem.objects.select_related("tag"), to_attr="prefetched_tags" + "insight__tagged_items", + queryset=TaggedItem.objects.select_related("tag"), + to_attr="prefetched_tags", ) ) self.user_permissions.set_preloaded_dashboard_tiles(list(tiles)) @@ -382,7 +405,12 @@ def _update_creation_mode(self, validated_data, use_template: str, use_dashboard return {**validated_data, "creation_mode": "default"} -class DashboardsViewSet(TaggedItemViewSetMixin, StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): +class DashboardsViewSet( + TaggedItemViewSetMixin, + StructuredViewSetMixin, + ForbidDestroyModel, + viewsets.ModelViewSet, +): queryset = Dashboard.objects.order_by("name") permission_classes = [ IsAuthenticated, @@ -462,11 +490,16 @@ def move_tile(self, request: Request, *args: Any, **kwargs: Any) -> Response: tile.save(update_fields=["dashboard_id"]) serializer = DashboardSerializer( - Dashboard.objects.get(id=from_dashboard), context={"view": self, "request": request} + Dashboard.objects.get(id=from_dashboard), + context={"view": self, "request": request}, ) return Response(serializer.data) - @action(methods=["POST"], detail=False, parser_classes=[DashboardTemplateCreationJSONSchemaParser]) + @action( + methods=["POST"], + detail=False, + parser_classes=[DashboardTemplateCreationJSONSchemaParser], + ) def create_from_template_json(self, request: Request, *args: Any, **kwargs: Any) -> Response: dashboard = Dashboard.objects.create(team_id=self.team_id) diff --git a/posthog/api/dashboards/dashboard_templates.py b/posthog/api/dashboards/dashboard_templates.py index ad00fcf87ef43..c35986ef5431f 100644 --- a/posthog/api/dashboards/dashboard_templates.py +++ b/posthog/api/dashboards/dashboard_templates.py @@ -15,7 +15,10 @@ from posthog.api.forbid_destroy_model import ForbidDestroyModel from posthog.api.routing import StructuredViewSetMixin from 
posthog.models.dashboard_templates import DashboardTemplate -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) logger = structlog.get_logger(__name__) diff --git a/posthog/api/dashboards/test/test_dashboard_templates.py b/posthog/api/dashboards/test/test_dashboard_templates.py index ac5072e29c6c1..2c44b421f1782 100644 --- a/posthog/api/dashboards/test/test_dashboard_templates.py +++ b/posthog/api/dashboards/test/test_dashboard_templates.py @@ -7,7 +7,14 @@ def assert_template_equals(received, expected): - keys_to_check = ["template_name", "dashboard_description", "tags", "variables", "tiles", "dashboard_filters"] + keys_to_check = [ + "template_name", + "dashboard_description", + "tags", + "variables", + "tiles", + "dashboard_filters", + ] for key in keys_to_check: assert received[key] == expected[key], f"key {key} failed, expected {expected[key]} but got {received[key]}" @@ -256,7 +263,8 @@ def test_delete_dashboard_template_by_id(self) -> None: dashboard_template = DashboardTemplate.objects.get(id=response.json()["id"]) update_response = self.client.patch( - f"/api/projects/{self.team.pk}/dashboard_templates/{response.json()['id']}", {"deleted": True} + f"/api/projects/{self.team.pk}/dashboard_templates/{response.json()['id']}", + {"deleted": True}, ) assert update_response.status_code == status.HTTP_200_OK, update_response @@ -279,7 +287,8 @@ def test_non_staff_user_cannot_delete_dashboard_template_by_id(self) -> None: self.user.save() patch_response = self.client.patch( - f"/api/projects/{self.team.pk}/dashboard_templates/{response.json()['id']}", {"deleted": True} + f"/api/projects/{self.team.pk}/dashboard_templates/{response.json()['id']}", + {"deleted": True}, ) assert patch_response.status_code == status.HTTP_403_FORBIDDEN, patch_response @@ -307,15 +316,41 @@ def 
test_update_dashboard_template_by_id(self) -> None: def test_dashboard_template_schema(self) -> None: dashboard_template_schema = { "type": "object", - "required": ["template_name", "dashboard_description", "dashboard_filters", "tiles"], + "required": [ + "template_name", + "dashboard_description", + "dashboard_filters", + "tiles", + ], "properties": { - "id": {"description": "The id of the dashboard template", "type": "string"}, - "template_name": {"description": "The name of the dashboard template", "type": "string"}, - "team_id": {"description": "The team this dashboard template belongs to", "type": ["number", "null"]}, - "created_at": {"description": "When the dashboard template was created", "type": "string"}, - "image_url": {"description": "The image of the dashboard template", "type": ["string", "null"]}, - "dashboard_description": {"description": "The description of the dashboard template", "type": "string"}, - "dashboard_filters": {"description": "The filters of the dashboard template", "type": "object"}, + "id": { + "description": "The id of the dashboard template", + "type": "string", + }, + "template_name": { + "description": "The name of the dashboard template", + "type": "string", + }, + "team_id": { + "description": "The team this dashboard template belongs to", + "type": ["number", "null"], + }, + "created_at": { + "description": "When the dashboard template was created", + "type": "string", + }, + "image_url": { + "description": "The image of the dashboard template", + "type": ["string", "null"], + }, + "dashboard_description": { + "description": "The description of the dashboard template", + "type": "string", + }, + "dashboard_filters": { + "description": "The filters of the dashboard template", + "type": "object", + }, "tiles": { "description": "The tiles of the dashboard template", "type": "array", @@ -329,14 +364,39 @@ def test_dashboard_template_schema(self) -> None: "type": "array", "items": { "type": "object", - "required": ["id", "name", 
"type", "default", "description", "required"], + "required": [ + "id", + "name", + "type", + "default", + "description", + "required", + ], "properties": { - "id": {"description": "The id of the variable", "type": "string"}, - "name": {"description": "The name of the variable", "type": "string"}, - "type": {"description": "The type of the variable", "enum": ["event"]}, - "default": {"description": "The default value of the variable", "type": "object"}, - "description": {"description": "The description of the variable", "type": "string"}, - "required": {"description": "Whether the variable is required", "type": "boolean"}, + "id": { + "description": "The id of the variable", + "type": "string", + }, + "name": { + "description": "The name of the variable", + "type": "string", + }, + "type": { + "description": "The type of the variable", + "enum": ["event"], + }, + "default": { + "description": "The default value of the variable", + "type": "object", + }, + "description": { + "description": "The description of the variable", + "type": "string", + }, + "required": { + "description": "Whether the variable is required", + "type": "boolean", + }, }, }, }, @@ -373,7 +433,8 @@ def test_cant_make_templates_without_teamid_private(self) -> None: # can't update the default template to be private response = self.client.patch( - f"/api/projects/{self.team.pk}/dashboard_templates/{dashboard_template.id}", {"scope": "team"} + f"/api/projects/{self.team.pk}/dashboard_templates/{dashboard_template.id}", + {"scope": "team"}, ) # unauthorized assert response.status_code == status.HTTP_400_BAD_REQUEST diff --git a/posthog/api/data_management.py b/posthog/api/data_management.py index a5060cc503b8a..186f2f83021f7 100644 --- a/posthog/api/data_management.py +++ b/posthog/api/data_management.py @@ -15,6 +15,11 @@ def all_activity(self, request: request.Request, **kwargs): limit = int(request.query_params.get("limit", "10")) page = int(request.query_params.get("page", "1")) - activity_page = 
load_all_activity(scope_list=["EventDefinition", "PropertyDefinition"], team_id=request.user.team.id, limit=limit, page=page) # type: ignore + activity_page = load_all_activity( + scope_list=["EventDefinition", "PropertyDefinition"], + team_id=request.user.team.id, # type: ignore + limit=limit, + page=page, + ) return activity_page_response(activity_page, limit, page, request) diff --git a/posthog/api/dead_letter_queue.py b/posthog/api/dead_letter_queue.py index e9beee6fb2ce2..93e2b09370b0e 100644 --- a/posthog/api/dead_letter_queue.py +++ b/posthog/api/dead_letter_queue.py @@ -25,7 +25,10 @@ "dlq_events_per_error": { "metric": "Total events per error", "fn": lambda offset: { - "subrows": {"columns": ["Error", "Total events"], "rows": get_dead_letter_queue_events_per_error(offset)} + "subrows": { + "columns": ["Error", "Total events"], + "rows": get_dead_letter_queue_events_per_error(offset), + } }, }, "dlq_events_per_location": { @@ -40,13 +43,19 @@ "dlq_events_per_day": { "metric": "Total events per day", "fn": lambda offset: { - "subrows": {"columns": ["Date", "Total events"], "rows": get_dead_letter_queue_events_per_day(offset)} + "subrows": { + "columns": ["Date", "Total events"], + "rows": get_dead_letter_queue_events_per_day(offset), + } }, }, "dlq_events_per_tag": { "metric": "Total events per tag", "fn": lambda offset: { - "subrows": {"columns": ["Date", "Total events"], "rows": get_dead_letter_queue_events_per_tag(offset)} + "subrows": { + "columns": ["Date", "Total events"], + "rows": get_dead_letter_queue_events_per_tag(offset), + } }, }, } diff --git a/posthog/api/decide.py b/posthog/api/decide.py index 335594fa64132..1f173bdc39016 100644 --- a/posthog/api/decide.py +++ b/posthog/api/decide.py @@ -1,31 +1,34 @@ -from random import random import re +from random import random from typing import Any, Dict, List, Optional, Union from urllib.parse import urlparse -from posthog.database_healthcheck import DATABASE_FOR_FLAG_MATCHING -from posthog.metrics 
import LABEL_TEAM_ID -from posthog.models.feature_flag.flag_analytics import increment_request_count -from posthog.models.filters.mixins.utils import process_bool import structlog +from django.conf import settings from django.http import HttpRequest, JsonResponse from django.views.decorators.csrf import csrf_exempt -from django.conf import settings +from prometheus_client import Counter from rest_framework import status from sentry_sdk import capture_exception from statshog.defaults.django import statsd -from prometheus_client import Counter - from posthog.api.geoip import get_geoip_properties from posthog.api.utils import get_project_id, get_token +from posthog.database_healthcheck import DATABASE_FOR_FLAG_MATCHING from posthog.exceptions import RequestParsingError, generate_exception_response from posthog.logging.timing import timed +from posthog.metrics import LABEL_TEAM_ID from posthog.models import Team, User from posthog.models.feature_flag import get_all_feature_flags +from posthog.models.feature_flag.flag_analytics import increment_request_count +from posthog.models.filters.mixins.utils import process_bool from posthog.models.utils import execute_with_timeout from posthog.plugins.site import get_decide_site_apps -from posthog.utils import get_ip_address, label_for_team_id_to_track, load_data_from_request +from posthog.utils import ( + get_ip_address, + label_for_team_id_to_track, + load_data_from_request, +) from posthog.utils_cors import cors_response FLAG_EVALUATION_COUNTER = Counter( @@ -210,11 +213,17 @@ def get_decide(request: HttpRequest): response["capturePerformance"] = True if team.capture_performance_opt_in else False response["autocapture_opt_out"] = True if team.autocapture_opt_out else False response["autocaptureExceptions"] = ( - {"endpoint": "/e/", "errors_to_ignore": team.autocapture_exceptions_errors_to_ignore or []} + { + "endpoint": "/e/", + "errors_to_ignore": team.autocapture_exceptions_errors_to_ignore or [], + } if 
team.autocapture_exceptions_opt_in else False ) + if settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS and str(team.id) in settings.NEW_ANALYTICS_CAPTURE_TEAM_IDS: + response["analytics"] = {"endpoint": settings.NEW_ANALYTICS_CAPTURE_ENDPOINT} + if team.session_recording_opt_in and ( on_permitted_recording_domain(team, request) or not team.recording_domains ): diff --git a/posthog/api/documentation.py b/posthog/api/documentation.py index ef1bfbcda962b..8bb076235cdb3 100644 --- a/posthog/api/documentation.py +++ b/posthog/api/documentation.py @@ -2,7 +2,10 @@ from typing import Dict, get_args from drf_spectacular.types import OpenApiTypes -from drf_spectacular.utils import extend_schema, extend_schema_field # # noqa: F401 for easy import +from drf_spectacular.utils import ( + extend_schema, # noqa: F401 + extend_schema_field, +) # # noqa: F401 for easy import from rest_framework import fields, serializers from posthog.models.entity import MathType @@ -20,16 +23,26 @@ def to_internal_value(self, data): class PropertyItemSerializer(serializers.Serializer): key = serializers.CharField( - help_text="Key of the property you're filtering on. For example `email` or `$current_url`", required=True + help_text="Key of the property you're filtering on. For example `email` or `$current_url`", + required=True, ) value = ValueField( help_text='Value of your filter. For example `test@example.com` or `https://example.com/test/`. 
Can be an array for an OR query, like `["test@example.com","ok@example.com"]`', required=True, ) operator = serializers.ChoiceField( - choices=get_args(OperatorType), required=False, allow_blank=True, default="exact", allow_null=True + choices=get_args(OperatorType), + required=False, + allow_blank=True, + default="exact", + allow_null=True, + ) + type = serializers.ChoiceField( + choices=get_args(PropertyType), + default="event", + required=False, + allow_blank=True, ) - type = serializers.ChoiceField(choices=get_args(PropertyType), default="event", required=False, allow_blank=True) property_help_text = "Filter events by event property, person property, cohort, groups and more." @@ -132,7 +145,10 @@ class FilterEventSerializer(serializers.Serializer): id = serializers.CharField(help_text="Name of the event to filter on. For example `$pageview` or `user sign up`.") properties = PropertySerializer(many=True, required=False) math = serializers.ChoiceField( - help_text=math_help_text, choices=get_args(MathType), default="total", required=False + help_text=math_help_text, + choices=get_args(MathType), + default="total", + required=False, ) @@ -140,7 +156,10 @@ class FilterActionSerializer(serializers.Serializer): id = serializers.CharField(help_text="ID of the action to filter on. 
For example `2841`.") properties = PropertySerializer(many=True, required=False) math = serializers.ChoiceField( - help_text=math_help_text, choices=get_args(MathType), default="total", required=False + help_text=math_help_text, + choices=get_args(MathType), + default="total", + required=False, ) @@ -166,7 +185,10 @@ def custom_postprocessing_hook(result, generator, request, public): paths[path] = {} for method, definition in methods.items(): definition["tags"] = [d for d in definition["tags"] if d not in ["projects"]] - match = re.search(r"((\/api\/(organizations|projects)/{(.*?)}\/)|(\/api\/))(?P[a-zA-Z0-9-_]*)\/", path) + match = re.search( + r"((\/api\/(organizations|projects)/{(.*?)}\/)|(\/api\/))(?P[a-zA-Z0-9-_]*)\/", + path, + ) if match: definition["tags"].append(match.group("one")) for tag in definition["tags"]: diff --git a/posthog/api/early_access_feature.py b/posthog/api/early_access_feature.py index 90d3cd33dccbf..b9129b1dc2333 100644 --- a/posthog/api/early_access_feature.py +++ b/posthog/api/early_access_feature.py @@ -15,7 +15,10 @@ from posthog.models.feature_flag.feature_flag import FeatureFlag from posthog.models.team.team import Team -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from django.utils.text import slugify from django.views.decorators.csrf import csrf_exempt @@ -65,7 +68,6 @@ def update(self, instance: EarlyAccessFeature, validated_data: Any) -> EarlyAcce stage = validated_data.get("stage", None) if instance.stage != EarlyAccessFeature.Stage.BETA and stage == EarlyAccessFeature.Stage.BETA: - super_conditions = lambda feature_flag_key: [ { "properties": [ @@ -82,7 +84,6 @@ def update(self, instance: EarlyAccessFeature, validated_data: Any) -> EarlyAcce related_feature_flag = instance.feature_flag if related_feature_flag: - related_feature_flag_key = related_feature_flag.key 
serialized_data_filters = { **related_feature_flag.filters, @@ -90,7 +91,10 @@ def update(self, instance: EarlyAccessFeature, validated_data: Any) -> EarlyAcce } serializer = FeatureFlagSerializer( - related_feature_flag, data={"filters": serialized_data_filters}, context=self.context, partial=True + related_feature_flag, + data={"filters": serialized_data_filters}, + context=self.context, + partial=True, ) serializer.is_valid(raise_exception=True) serializer.save() @@ -178,10 +182,16 @@ def create(self, validated_data): feature_flag_key = feature_flag.key if validated_data.get("stage") == EarlyAccessFeature.Stage.BETA: - serialized_data_filters = {**feature_flag.filters, "super_groups": super_conditions(feature_flag_key)} + serialized_data_filters = { + **feature_flag.filters, + "super_groups": super_conditions(feature_flag_key), + } serializer = FeatureFlagSerializer( - feature_flag, data={"filters": serialized_data_filters}, context=self.context, partial=True + feature_flag, + data={"filters": serialized_data_filters}, + context=self.context, + partial=True, ) serializer.is_valid(raise_exception=True) serializer.save() @@ -242,7 +252,6 @@ def destroy(self, request: Request, *args: Any, **kwargs: Any) -> Response: @csrf_exempt def early_access_features(request: Request): - token = get_token(None, request) if not token: diff --git a/posthog/api/element.py b/posthog/api/element.py index 6787b46e93451..5581ba0e6eb2a 100644 --- a/posthog/api/element.py +++ b/posthog/api/element.py @@ -14,7 +14,10 @@ from posthog.models.element.sql import GET_ELEMENTS, GET_VALUES from posthog.models.instance_setting import get_instance_setting from posthog.models.property.util import parse_prop_grouped_clauses -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.queries.query_date_range import QueryDateRange from 
posthog.utils import format_query_params_absolute_url @@ -46,7 +49,11 @@ class ElementViewSet(StructuredViewSetMixin, viewsets.ModelViewSet): authentication.SessionAuthentication, authentication.BasicAuthentication, ] - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] include_in_docs = False @action(methods=["GET"], detail=False) @@ -84,14 +91,23 @@ def stats(self, request: request.Request, **kwargs) -> response.Response: paginate_response = request.query_params.get("paginate_response", "false") == "true" if not paginate_response: # once we are getting no hits on this counter we can default to paginated responses - statsd.incr("toolbar_element_stats_unpaginated_api_request_tombstone", tags={"team_id": self.team_id}) + statsd.incr( + "toolbar_element_stats_unpaginated_api_request_tombstone", + tags={"team_id": self.team_id}, + ) prop_filters, prop_filter_params = parse_prop_grouped_clauses( - team_id=self.team.pk, property_group=filter.property_groups, hogql_context=filter.hogql_context + team_id=self.team.pk, + property_group=filter.property_groups, + hogql_context=filter.hogql_context, ) result = sync_execute( GET_ELEMENTS.format( - date_from=date_from, date_to=date_to, query=prop_filters, limit=limit + 1, offset=offset + date_from=date_from, + date_to=date_to, + query=prop_filters, + limit=limit + 1, + offset=offset, ), { "team_id": self.team.pk, @@ -117,7 +133,13 @@ def stats(self, request: request.Request, **kwargs) -> response.Response: has_next = len(result) == limit + 1 next_url = format_query_params_absolute_url(request, offset + limit) if has_next else None previous_url = format_query_params_absolute_url(request, offset - limit) if offset - limit >= 0 else None - return response.Response({"results": serialized_elements, "next": next_url, "previous": previous_url}) + return 
response.Response( + { + "results": serialized_elements, + "next": next_url, + "previous": previous_url, + } + ) else: return response.Response(serialized_elements) @@ -161,7 +183,12 @@ def values(self, request: request.Request, **kwargs) -> response.Response: filter_regex = select_regex result = sync_execute( - GET_VALUES.format(), {"team_id": self.team.id, "regex": select_regex, "filter_regex": filter_regex} + GET_VALUES.format(), + { + "team_id": self.team.id, + "regex": select_regex, + "filter_regex": filter_regex, + }, ) return response.Response([{"name": value[0]} for value in result]) diff --git a/posthog/api/event.py b/posthog/api/event.py index 9d2f3fafeebff..662b3affab87f 100644 --- a/posthog/api/event.py +++ b/posthog/api/event.py @@ -25,9 +25,15 @@ from posthog.models.person.util import get_persons_by_distinct_ids from posthog.models.team import Team from posthog.models.utils import UUIDT -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.queries.property_values import get_property_values_for_key -from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) from posthog.utils import convert_property_value, flatten QUERY_DEFAULT_EXPORT_LIMIT = 3_500 @@ -52,13 +58,27 @@ class Meta: ] -class EventViewSet(StructuredViewSetMixin, mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet): +class EventViewSet( + StructuredViewSetMixin, + mixins.RetrieveModelMixin, + mixins.ListModelMixin, + viewsets.GenericViewSet, +): renderer_classes = tuple(api_settings.DEFAULT_RENDERER_CLASSES) + (csvrenderers.PaginatedCSVRenderer,) serializer_class = ClickhouseEventSerializer - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, 
TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] - def _build_next_url(self, request: request.Request, last_event_timestamp: datetime, order_by: List[str]) -> str: + def _build_next_url( + self, + request: request.Request, + last_event_timestamp: datetime, + order_by: List[str], + ) -> str: params = request.GET.dict() reverse = "-timestamp" in order_by timestamp = last_event_timestamp.astimezone().isoformat() @@ -88,14 +108,26 @@ def _build_next_url(self, request: request.Request, last_event_timestamp: dateti many=True, ), OpenApiParameter("person_id", OpenApiTypes.INT, description="Filter list by person id."), - OpenApiParameter("distinct_id", OpenApiTypes.INT, description="Filter list by distinct id."), OpenApiParameter( - "before", OpenApiTypes.DATETIME, description="Only return events with a timestamp before this time." + "distinct_id", + OpenApiTypes.INT, + description="Filter list by distinct id.", ), OpenApiParameter( - "after", OpenApiTypes.DATETIME, description="Only return events with a timestamp after this time." + "before", + OpenApiTypes.DATETIME, + description="Only return events with a timestamp before this time.", + ), + OpenApiParameter( + "after", + OpenApiTypes.DATETIME, + description="Only return events with a timestamp after this time.", + ), + OpenApiParameter( + "limit", + OpenApiTypes.INT, + description="The maximum number of results to return", ), - OpenApiParameter("limit", OpenApiTypes.INT, description="The maximum number of results to return"), PropertiesSerializer(required=False), ] ) @@ -147,7 +179,9 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response. 
) result = ClickhouseEventSerializer( - query_result[0:limit], many=True, context={"people": self._get_people(query_result, team)} + query_result[0:limit], + many=True, + context={"people": self._get_people(query_result, team)}, ).data next_url: Optional[str] = None @@ -170,12 +204,20 @@ def _get_people(self, query_result: List[Dict], team: Team) -> Dict[str, Any]: return distinct_to_person def retrieve( - self, request: request.Request, pk: Optional[Union[int, str]] = None, *args: Any, **kwargs: Any + self, + request: request.Request, + pk: Optional[Union[int, str]] = None, + *args: Any, + **kwargs: Any, ) -> response.Response: - if not isinstance(pk, str) or not UUIDT.is_valid_uuid(pk): return response.Response( - {"detail": "Invalid UUID", "code": "invalid", "type": "validation_error"}, status=400 + { + "detail": "Invalid UUID", + "code": "invalid", + "type": "validation_error", + }, + status=400, ) query_result = query_with_columns( SELECT_ONE_EVENT_SQL, diff --git a/posthog/api/event_definition.py b/posthog/api/event_definition.py index 5d208bc9c4b55..6616c8beaddab 100644 --- a/posthog/api/event_definition.py +++ b/posthog/api/event_definition.py @@ -1,7 +1,15 @@ from typing import Any, Literal, Tuple, Type, cast from django.db.models import Manager, Prefetch -from rest_framework import mixins, permissions, serializers, viewsets, request, status, response +from rest_framework import ( + mixins, + permissions, + serializers, + viewsets, + request, + status, + response, +) from posthog.api.routing import StructuredViewSetMixin from posthog.api.shared import UserBasicSerializer @@ -15,7 +23,10 @@ from posthog.models.activity_logging.activity_log import Detail, log_activity from posthog.models.user import User from posthog.models.utils import UUIDT -from posthog.permissions import OrganizationMemberPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + OrganizationMemberPermissions, + TeamMemberAccessPermission, +) from posthog.settings 
import EE_AVAILABLE # If EE is enabled, we use ee.api.ee_event_definition.EnterpriseEventDefinitionSerializer @@ -132,7 +143,9 @@ def _ordering_params_from_request( def get_object(self): id = self.kwargs["id"] - if EE_AVAILABLE and self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): # type: ignore + if EE_AVAILABLE and self.request.user.organization.is_feature_available( # type: ignore + AvailableFeature.INGESTION_TAXONOMY + ): from ee.models.event_definition import EnterpriseEventDefinition enterprise_event = EnterpriseEventDefinition.objects.filter(id=id).first() @@ -151,7 +164,9 @@ def get_object(self): def get_serializer_class(self) -> Type[serializers.ModelSerializer]: serializer_class = self.serializer_class - if EE_AVAILABLE and self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): # type: ignore + if EE_AVAILABLE and self.request.user.organization.is_feature_available( # type: ignore + AvailableFeature.INGESTION_TAXONOMY + ): from ee.api.ee_event_definition import EnterpriseEventDefinitionSerializer serializer_class = EnterpriseEventDefinitionSerializer # type: ignore @@ -162,7 +177,11 @@ def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> respon instance_id: str = str(instance.id) self.perform_destroy(instance) # Casting, since an anonymous use CANNOT access this endpoint - report_user_action(cast(User, request.user), "event definition deleted", {"name": instance.name}) + report_user_action( + cast(User, request.user), + "event definition deleted", + {"name": instance.name}, + ) user = cast(User, request.user) log_activity( organization_id=cast(UUIDT, self.organization_id), diff --git a/posthog/api/exports.py b/posthog/api/exports.py index 3a822aa9b3d06..e4f5dd8104f8c 100644 --- a/posthog/api/exports.py +++ b/posthog/api/exports.py @@ -19,7 +19,10 @@ from posthog.models import Insight, User from posthog.models.activity_logging.activity_log import Change, Detail, 
log_activity from posthog.models.exported_asset import ExportedAsset, get_content_response -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.tasks import exporter logger = structlog.get_logger(__name__) @@ -71,7 +74,11 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> ExportedAss return self._create_asset(validated_data, user=request.user, asset_generation_timeout=10, reason=None) def _create_asset( - self, validated_data: Dict, user: User | None, asset_generation_timeout: float, reason: str | None + self, + validated_data: Dict, + user: User | None, + asset_generation_timeout: float, + reason: str | None, ) -> ExportedAsset: if user is not None: validated_data["created_by"] = user @@ -100,13 +107,20 @@ def _create_asset( short_id=insight.short_id, changes=[ Change( - type="Insight", action="exported", field="export_format", after=instance.export_format + type="Insight", + action="exported", + field="export_format", + after=instance.export_format, ) ], ), ) except Insight.DoesNotExist as ex: - logger.warn("insight_exports.unknown_insight", exception=ex, insight_id=insight_id) + logger.warn( + "insight_exports.unknown_insight", + exception=ex, + insight_id=insight_id, + ) pass return instance @@ -130,13 +144,24 @@ def generate_export_sync(instance: ExportedAsset, timeout: float = 10) -> None: class ExportedAssetViewSet( - mixins.RetrieveModelMixin, mixins.CreateModelMixin, StructuredViewSetMixin, viewsets.GenericViewSet + mixins.RetrieveModelMixin, + mixins.CreateModelMixin, + StructuredViewSetMixin, + viewsets.GenericViewSet, ): queryset = ExportedAsset.objects.order_by("-created_at") serializer_class = ExportedAssetSerializer - authentication_classes = [PersonalAPIKeyAuthentication, SessionAuthentication, BasicAuthentication] - permission_classes = [IsAuthenticated, 
ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + authentication_classes = [ + PersonalAPIKeyAuthentication, + SessionAuthentication, + BasicAuthentication, + ] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] # TODO: This should be removed as it is only used by frontend exporter and can instead use the api/sharing.py endpoint @action(methods=["GET"], detail=True) diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py index f61543e14f5cb..6e61251818f48 100644 --- a/posthog/api/feature_flag.py +++ b/posthog/api/feature_flag.py @@ -3,7 +3,14 @@ from django.db.models import QuerySet, Q from django.conf import settings -from rest_framework import authentication, exceptions, request, serializers, status, viewsets +from rest_framework import ( + authentication, + exceptions, + request, + serializers, + status, + viewsets, +) from rest_framework.decorators import action from rest_framework.permissions import SAFE_METHODS, BasePermission, IsAuthenticated from rest_framework.request import Request @@ -18,9 +25,16 @@ from posthog.auth import PersonalAPIKeyAuthentication, TemporaryTokenAuthentication from posthog.constants import FlagRequestType from posthog.event_usage import report_user_action -from posthog.helpers.dashboard_templates import add_enriched_insights_to_feature_flag_dashboard +from posthog.helpers.dashboard_templates import ( + add_enriched_insights_to_feature_flag_dashboard, +) from posthog.models import FeatureFlag -from posthog.models.activity_logging.activity_log import Detail, changes_between, load_activity, log_activity +from posthog.models.activity_logging.activity_log import ( + Detail, + changes_between, + load_activity, + log_activity, +) from posthog.models.activity_logging.activity_page import activity_page_response from posthog.models.cohort import Cohort from posthog.models.cohort.util import get_dependent_cohorts @@ -35,7 +49,10 @@ from 
posthog.models.feedback.survey import Survey from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.property import Property -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.rate_limit import BurstRateThrottle DATABASE_FOR_LOCAL_EVALUATION = ( @@ -68,10 +85,10 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo is_simple_flag = serializers.SerializerMethodField() rollout_percentage = serializers.SerializerMethodField() - experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True) + experiment_set: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(many=True, read_only=True) surveys: serializers.SerializerMethodField = serializers.SerializerMethodField() features: serializers.SerializerMethodField = serializers.SerializerMethodField() - usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True) + usage_dashboard: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(read_only=True) analytics_dashboards = serializers.PrimaryKeyRelatedField( many=True, required=False, @@ -204,7 +221,8 @@ def properties_all_match(predicate): ) except Cohort.DoesNotExist: raise serializers.ValidationError( - detail=f"Cohort with id {prop.value} does not exist", code="cohort_does_not_exist" + detail=f"Cohort with id {prop.value} does not exist", + code="cohort_does_not_exist", ) payloads = filters.get("payloads", {}) @@ -222,7 +240,6 @@ def properties_all_match(predicate): return filters def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag: - request = self.context["request"] validated_data["created_by"] = request.user validated_data["team_id"] = self.context["team_id"] @@ -252,7 +269,6 @@ 
def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag return instance def update(self, instance: FeatureFlag, validated_data: Dict, *args: Any, **kwargs: Any) -> FeatureFlag: - if "deleted" in validated_data and validated_data["deleted"] is True and instance.features.count() > 0: raise exceptions.ValidationError( "Cannot delete a feature flag that is in use with early access features. Please delete the early access feature before deleting the flag." @@ -319,7 +335,12 @@ class Meta: ] -class FeatureFlagViewSet(TaggedItemViewSetMixin, StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): +class FeatureFlagViewSet( + TaggedItemViewSetMixin, + StructuredViewSetMixin, + ForbidDestroyModel, + viewsets.ModelViewSet, +): """ Create, read, update and delete feature flags. [See docs](https://posthog.com/docs/user-guides/feature-flags) for more information on feature flags. @@ -468,7 +489,6 @@ def my_flags(self, request: request.Request, **kwargs): @action(methods=["GET"], detail=False, throttle_classes=[FeatureFlagThrottle]) def local_evaluation(self, request: request.Request, **kwargs): - feature_flags: QuerySet[FeatureFlag] = FeatureFlag.objects.using(DATABASE_FOR_LOCAL_EVALUATION).filter( team_id=self.team_id, deleted=False, active=True ) @@ -493,7 +513,8 @@ def local_evaluation(self, request: request.Request, **kwargs): if not should_send_cohorts and ( len( feature_flag.get_cohort_ids( - using_database=DATABASE_FOR_LOCAL_EVALUATION, seen_cohorts_cache=seen_cohorts_cache + using_database=DATABASE_FOR_LOCAL_EVALUATION, + seen_cohorts_cache=seen_cohorts_cache, ) ) == 1 @@ -501,7 +522,8 @@ def local_evaluation(self, request: request.Request, **kwargs): feature_flag.filters = { **filters, "groups": feature_flag.transform_cohort_filters_for_easy_evaluation( - using_database=DATABASE_FOR_LOCAL_EVALUATION, seen_cohorts_cache=seen_cohorts_cache + using_database=DATABASE_FOR_LOCAL_EVALUATION, + seen_cohorts_cache=seen_cohorts_cache, ), } 
else: @@ -513,7 +535,8 @@ def local_evaluation(self, request: request.Request, **kwargs): # irrespective of complexity if should_send_cohorts: for id in feature_flag.get_cohort_ids( - using_database=DATABASE_FOR_LOCAL_EVALUATION, seen_cohorts_cache=seen_cohorts_cache + using_database=DATABASE_FOR_LOCAL_EVALUATION, + seen_cohorts_cache=seen_cohorts_cache, ): # don't duplicate queries for already added cohorts if id not in cohorts: @@ -548,7 +571,6 @@ def local_evaluation(self, request: request.Request, **kwargs): @action(methods=["GET"], detail=False) def evaluation_reasons(self, request: request.Request, **kwargs): - distinct_id = request.query_params.get("distinct_id", None) groups = json.loads(request.query_params.get("groups", "{}")) @@ -582,7 +604,6 @@ def evaluation_reasons(self, request: request.Request, **kwargs): @action(methods=["POST"], detail=False) def user_blast_radius(self, request: request.Request, **kwargs): - if "condition" not in request.data: raise exceptions.ValidationError("Missing condition for which to get blast radius") @@ -617,7 +638,11 @@ def activity(self, request: request.Request, **kwargs): return Response("", status=status.HTTP_404_NOT_FOUND) activity_page = load_activity( - scope="FeatureFlag", team_id=self.team_id, item_id=item_id, limit=limit, page=page + scope="FeatureFlag", + team_id=self.team_id, + item_id=item_id, + limit=limit, + page=page, ) return activity_page_response(activity_page, limit, page, request) diff --git a/posthog/api/ingestion_warnings.py b/posthog/api/ingestion_warnings.py index 11192e402e229..827567e5c8cb8 100644 --- a/posthog/api/ingestion_warnings.py +++ b/posthog/api/ingestion_warnings.py @@ -21,7 +21,10 @@ def list(self, request: Request, **kw) -> Response: AND timestamp > %(start_date)s ORDER BY timestamp DESC """, - {"team_id": self.team_id, "start_date": start_date.strftime("%Y-%m-%d %H:%M:%S")}, + { + "team_id": self.team_id, + "start_date": start_date.strftime("%Y-%m-%d %H:%M:%S"), + }, ) return 
Response({"results": _calculate_summaries(warning_events)}) @@ -32,7 +35,12 @@ def _calculate_summaries(warning_events): for warning_type, timestamp, details in warning_events: details = json.loads(details) if warning_type not in summaries: - summaries[warning_type] = {"type": warning_type, "lastSeen": timestamp, "warnings": [], "count": 0} + summaries[warning_type] = { + "type": warning_type, + "lastSeen": timestamp, + "warnings": [], + "count": 0, + } summaries[warning_type]["warnings"].append({"type": warning_type, "timestamp": timestamp, "details": details}) summaries[warning_type]["count"] += 1 diff --git a/posthog/api/insight.py b/posthog/api/insight.py index 9412e744b4e89..683d4895aca6e 100644 --- a/posthog/api/insight.py +++ b/posthog/api/insight.py @@ -37,7 +37,11 @@ from posthog.api.tagged_item import TaggedItemSerializerMixin, TaggedItemViewSetMixin from posthog.api.utils import format_paginated_url from posthog.auth import SharingAccessTokenAuthentication -from posthog.caching.fetch_from_cache import InsightResult, fetch_cached_insight_result, synchronously_update_cache +from posthog.caching.fetch_from_cache import ( + InsightResult, + fetch_cached_insight_result, + synchronously_update_cache, +) from posthog.caching.insights_api import should_refresh_insight from posthog.client import sync_execute from posthog.constants import ( @@ -51,7 +55,9 @@ FunnelVizType, ) from posthog.decorators import cached_by_filters -from posthog.helpers.multi_property_breakdown import protect_old_clients_from_multi_property_default +from posthog.helpers.multi_property_breakdown import ( + protect_old_clients_from_multi_property_default, +) from posthog.hogql.errors import HogQLException from posthog.kafka_client.topics import KAFKA_METRICS_TIME_TO_SEE_DATA from posthog.models import DashboardTile, Filter, Insight, User @@ -74,19 +80,30 @@ ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission, ) -from posthog.queries.funnels import ClickhouseFunnelTimeToConvert, 
ClickhouseFunnelTrends +from posthog.queries.funnels import ( + ClickhouseFunnelTimeToConvert, + ClickhouseFunnelTrends, +) from posthog.queries.funnels.utils import get_funnel_order_class from posthog.queries.paths.paths import Paths from posthog.queries.retention import Retention from posthog.queries.stickiness import Stickiness from posthog.queries.trends.trends import Trends from posthog.queries.util import get_earliest_timestamp -from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) from posthog.settings import CAPTURE_TIME_TO_SEE_DATA, SITE_URL from posthog.settings.data_stores import CLICKHOUSE_CLUSTER from prometheus_client import Counter from posthog.user_permissions import UserPermissionsSerializerMixin -from posthog.utils import DEFAULT_DATE_FROM_DAYS, refresh_requested_by_client, relative_date_parse, str_to_bool +from posthog.utils import ( + DEFAULT_DATE_FROM_DAYS, + refresh_requested_by_client, + relative_date_parse, + str_to_bool, +) logger = structlog.get_logger(__name__) @@ -342,7 +359,8 @@ def update(self, instance: Insight, validated_data: Dict, **kwargs) -> Insight: dashboards_before_change = [describe_change(dt.dashboard) for dt in instance.dashboard_tiles.all()] dashboards_before_change = sorted( - dashboards_before_change, key=lambda x: -1 if isinstance(x, str) else x["id"] + dashboards_before_change, + key=lambda x: -1 if isinstance(x, str) else x["id"], ) except Insight.DoesNotExist: before_update = None @@ -506,7 +524,10 @@ def insight_result(self, insight: Insight) -> InsightResult: is_shared = self.context.get("is_shared", False) refresh_insight_now, refresh_frequency = should_refresh_insight( - insight, dashboard_tile, request=self.context["request"], is_shared=is_shared + insight, + dashboard_tile, + request=self.context["request"], + is_shared=is_shared, ) if refresh_insight_now: 
INSIGHT_REFRESH_INITIATED_COUNTER.labels(is_shared=is_shared).inc() diff --git a/posthog/api/insight_serializers.py b/posthog/api/insight_serializers.py index bfeebed254258..d99642d4ea1fa 100644 --- a/posthog/api/insight_serializers.py +++ b/posthog/api/insight_serializers.py @@ -48,10 +48,14 @@ def __init__(self, request=None, *args, **kwargs): super().__init__(*args, **kwargs) events = FilterEventSerializer( - required=False, many=True, help_text="Events to filter on. One of `events` or `actions` is required." + required=False, + many=True, + help_text="Events to filter on. One of `events` or `actions` is required.", ) actions = FilterActionSerializer( - required=False, many=True, help_text="Actions to filter on. One of `events` or `actions` is required." + required=False, + many=True, + help_text="Actions to filter on. One of `events` or `actions` is required.", ) properties = PropertySerializer(required=False, help_text=property_help_text) filter_test_accounts = serializers.BooleanField( @@ -125,10 +129,12 @@ class ResultsMixin(serializers.Serializer): class TrendResultSerializer(serializers.Serializer): data = serializers.ListField(child=serializers.IntegerField(), help_text="The requested counts.") # type: ignore days = serializers.ListField( - child=serializers.DateField(), help_text="The dates corresponding to the data field above." + child=serializers.DateField(), + help_text="The dates corresponding to the data field above.", ) labels = serializers.ListField( - child=serializers.CharField(), help_text="The dates corresponding to the data field above." 
+ child=serializers.CharField(), + help_text="The dates corresponding to the data field above.", ) filter = GenericInsightsSerializer(help_text="The insight that's being returned.") label = serializers.CharField( diff --git a/posthog/api/instance_settings.py b/posthog/api/instance_settings.py index ab8b0fd45e8e3..dc0b41e5cb1da 100644 --- a/posthog/api/instance_settings.py +++ b/posthog/api/instance_settings.py @@ -4,8 +4,12 @@ from rest_framework import exceptions, mixins, permissions, serializers, viewsets from posthog.cloud_utils import is_cloud -from posthog.models.instance_setting import get_instance_setting as get_instance_setting_raw -from posthog.models.instance_setting import set_instance_setting as set_instance_setting_raw +from posthog.models.instance_setting import ( + get_instance_setting as get_instance_setting_raw, +) +from posthog.models.instance_setting import ( + set_instance_setting as set_instance_setting_raw, +) from posthog.permissions import IsStaffUser from posthog.settings import ( CONSTANCE_CONFIG, @@ -35,7 +39,14 @@ class InstanceSettingHelper: is_secret: bool = False def __init__(self, **kwargs): - for field in ("key", "value", "value_type", "description", "editable", "is_secret"): + for field in ( + "key", + "value", + "value_type", + "description", + "editable", + "is_secret", + ): setattr(self, field, kwargs.get(field, None)) @@ -83,7 +94,9 @@ def update(self, instance: InstanceSettingHelper, validated_data: Dict[str, Any] # TODO: Move to top-level imports once CH is moved out of `ee` from posthog.client import sync_execute - from posthog.session_recordings.sql.session_recording_event_sql import UPDATE_RECORDINGS_TABLE_TTL_SQL + from posthog.session_recordings.sql.session_recording_event_sql import ( + UPDATE_RECORDINGS_TABLE_TTL_SQL, + ) sync_execute(UPDATE_RECORDINGS_TABLE_TTL_SQL(), {"weeks": new_value_parsed}) @@ -95,7 +108,9 @@ def update(self, instance: InstanceSettingHelper, validated_data: Dict[str, Any] # TODO: Move to 
top-level imports once CH is moved out of `ee` from posthog.client import sync_execute - from posthog.models.performance.sql import UPDATE_PERFORMANCE_EVENTS_TABLE_TTL_SQL + from posthog.models.performance.sql import ( + UPDATE_PERFORMANCE_EVENTS_TABLE_TTL_SQL, + ) sync_execute(UPDATE_PERFORMANCE_EVENTS_TABLE_TTL_SQL(), {"weeks": new_value_parsed}) @@ -116,7 +131,10 @@ def update(self, instance: InstanceSettingHelper, validated_data: Dict[str, Any] class InstanceSettingsViewset( - viewsets.GenericViewSet, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin + viewsets.GenericViewSet, + mixins.ListModelMixin, + mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, ): permission_classes = [permissions.IsAuthenticated, IsStaffUser] serializer_class = InstanceSettingsSerializer diff --git a/posthog/api/instance_status.py b/posthog/api/instance_status.py index c54b9b2bc071c..abdfcdad25dea 100644 --- a/posthog/api/instance_status.py +++ b/posthog/api/instance_status.py @@ -51,12 +51,19 @@ def list(self, request: Request) -> Response: "key": "helm", "metric": "Helm Info", "value": "", - "subrows": {"columns": ["key", "value"], "rows": list(helm_info.items())}, + "subrows": { + "columns": ["key", "value"], + "rows": list(helm_info.items()), + }, } ) metrics.append( - {"key": "plugin_sever_alive", "metric": "Plugin server alive", "value": is_plugin_server_alive()} + { + "key": "plugin_sever_alive", + "metric": "Plugin server alive", + "value": is_plugin_server_alive(), + } ) metrics.append( { @@ -77,7 +84,13 @@ def list(self, request: Request) -> Response: } ) - metrics.append({"key": "db_alive", "metric": "Postgres database alive", "value": postgres_alive}) + metrics.append( + { + "key": "db_alive", + "metric": "Postgres database alive", + "value": postgres_alive, + } + ) if postgres_alive: postgres_version = connection.cursor().connection.server_version metrics.append( @@ -88,7 +101,11 @@ def list(self, request: Request) -> Response: } ) 
metrics.append( - {"key": "async_migrations_ok", "metric": "Async migrations up-to-date", "value": async_migrations_ok()} + { + "key": "async_migrations_ok", + "metric": "Async migrations up-to-date", + "value": async_migrations_ok(), + } ) from posthog.clickhouse.system_status import system_status @@ -102,14 +119,35 @@ def list(self, request: Request) -> Response: try: redis_info = get_redis_info() redis_queue_depth = get_redis_queue_depth() - metrics.append({"metric": "Redis version", "value": f"{redis_info.get('redis_version')}"}) - metrics.append({"metric": "Redis current queue depth", "value": f"{redis_queue_depth}"}) metrics.append( - {"metric": "Redis connected client count", "value": f"{redis_info.get('connected_clients')}"} + { + "metric": "Redis version", + "value": f"{redis_info.get('redis_version')}", + } + ) + metrics.append( + { + "metric": "Redis current queue depth", + "value": f"{redis_queue_depth}", + } ) - metrics.append({"metric": "Redis memory used", "value": f"{redis_info.get('used_memory_human', '?')}B"}) metrics.append( - {"metric": "Redis memory peak", "value": f"{redis_info.get('used_memory_peak_human', '?')}B"} + { + "metric": "Redis connected client count", + "value": f"{redis_info.get('connected_clients')}", + } + ) + metrics.append( + { + "metric": "Redis memory used", + "value": f"{redis_info.get('used_memory_human', '?')}B", + } + ) + metrics.append( + { + "metric": "Redis memory peak", + "value": f"{redis_info.get('used_memory_peak_human', '?')}B", + } ) metrics.append( { @@ -118,7 +156,10 @@ def list(self, request: Request) -> Response: } ) metrics.append( - {"metric": "Redis 'maxmemory' setting", "value": f"{redis_info.get('maxmemory_human', '?')}B"} + { + "metric": "Redis 'maxmemory' setting", + "value": f"{redis_info.get('maxmemory_human', '?')}B", + } ) metrics.append( { @@ -128,15 +169,26 @@ def list(self, request: Request) -> Response: ) except redis.exceptions.ConnectionError as e: metrics.append( - {"metric": "Redis 
metrics", "value": f"Redis connected but then failed to return metrics: {e}"} + { + "metric": "Redis metrics", + "value": f"Redis connected but then failed to return metrics: {e}", + } ) metrics.append( - {"key": "object_storage", "metric": "Object Storage enabled", "value": settings.OBJECT_STORAGE_ENABLED} + { + "key": "object_storage", + "metric": "Object Storage enabled", + "value": settings.OBJECT_STORAGE_ENABLED, + } ) if settings.OBJECT_STORAGE_ENABLED: metrics.append( - {"key": "object_storage", "metric": "Object Storage healthy", "value": object_storage.health_check()} + { + "key": "object_storage", + "metric": "Object Storage healthy", + "value": object_storage.health_check(), + } ) return Response({"results": {"overview": metrics}}) @@ -166,7 +218,10 @@ def navigation(self, request: Request) -> Response: def queries(self, request: Request) -> Response: queries = {"postgres_running": self.get_postgres_running_queries()} - from posthog.clickhouse.system_status import get_clickhouse_running_queries, get_clickhouse_slow_log + from posthog.clickhouse.system_status import ( + get_clickhouse_running_queries, + get_clickhouse_slow_log, + ) queries["clickhouse_running"] = get_clickhouse_running_queries() queries["clickhouse_slow_log"] = get_clickhouse_slow_log() diff --git a/posthog/api/integration.py b/posthog/api/integration.py index 6c79492e8dd98..022a4f049f969 100644 --- a/posthog/api/integration.py +++ b/posthog/api/integration.py @@ -12,7 +12,10 @@ from posthog.api.shared import UserBasicSerializer from posthog.auth import PersonalAPIKeyAuthentication from posthog.models.integration import Integration, SlackIntegration -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) class IntegrationSerializer(serializers.ModelSerializer): @@ -60,7 +63,11 @@ class IntegrationViewSet( SessionAuthentication, 
BasicAuthentication, ] - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] @action(methods=["GET"], detail=True, url_path="channels") def content(self, request: Request, *args: Any, **kwargs: Any) -> Response: diff --git a/posthog/api/kafka_inspector.py b/posthog/api/kafka_inspector.py index cc5082a1fcacc..24865e47d40d3 100644 --- a/posthog/api/kafka_inspector.py +++ b/posthog/api/kafka_inspector.py @@ -10,6 +10,7 @@ KAFKA_CONSUMER_TIMEOUT = 1000 + # the kafka package doesn't expose ConsumerRecord class KafkaConsumerRecord: topic: str @@ -46,13 +47,13 @@ def fetch_message(self, request): partition = request.data.get("partition", None) offset = request.data.get("offset", None) - if type(topic) != str: + if not isinstance(topic, str): return Response({"error": "Invalid topic."}, status=400) - if type(partition) != int: + if not isinstance(partition, int): return Response({"error": "Invalid partition."}, status=400) - if type(offset) != int: + if not isinstance(offset, int): return Response({"error": "Invalid offset."}, status=400) try: @@ -74,7 +75,10 @@ def fetch_message(self, request): def get_kafka_message(topic: str, partition: int, offset: int) -> KafkaConsumerRecord: consumer = build_kafka_consumer( - topic=None, auto_offset_reset="earliest", group_id="kafka-inspector", consumer_timeout_ms=KAFKA_CONSUMER_TIMEOUT + topic=None, + auto_offset_reset="earliest", + group_id="kafka-inspector", + consumer_timeout_ms=KAFKA_CONSUMER_TIMEOUT, ) consumer.assign([TopicPartition(topic, partition)]) diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py index eb8ec1c1bf27d..103261dca90e6 100644 --- a/posthog/api/notebook.py +++ b/posthog/api/notebook.py @@ -6,7 +6,12 @@ from django.utils.timezone import now from django_filters.rest_framework import DjangoFilterBackend from drf_spectacular.types 
import OpenApiTypes -from drf_spectacular.utils import extend_schema, OpenApiParameter, extend_schema_view, OpenApiExample +from drf_spectacular.utils import ( + extend_schema, + OpenApiParameter, + extend_schema_view, + OpenApiExample, +) from rest_framework import request, serializers, viewsets from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated @@ -26,7 +31,10 @@ from posthog.models.activity_logging.activity_page import activity_page_response from posthog.models.notebook.notebook import Notebook from posthog.models.utils import UUIDT -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.settings import DEBUG from posthog.utils import relative_date_parse @@ -100,7 +108,10 @@ def create(self, validated_data: Dict, *args, **kwargs) -> Notebook: created_by = validated_data.pop("created_by", request.user) notebook = Notebook.objects.create( - team=team, created_by=created_by, last_modified_by=request.user, **validated_data + team=team, + created_by=created_by, + last_modified_by=request.user, + **validated_data, ) log_notebook_activity( @@ -162,7 +173,10 @@ def update(self, instance: Notebook, validated_data: Dict, **kwargs) -> Notebook parameters=[ OpenApiParameter("short_id", exclude=True), OpenApiParameter( - "created_by", OpenApiTypes.INT, description="The UUID of the Notebook's creator", required=False + "created_by", + OpenApiTypes.INT, + description="The UUID of the Notebook's creator", + required=False, ), OpenApiParameter( "user", @@ -208,7 +222,11 @@ def update(self, instance: Notebook, validated_data: Dict, **kwargs) -> Notebook class NotebookViewSet(StructuredViewSetMixin, ForbidDestroyModel, viewsets.ModelViewSet): queryset = Notebook.objects.all() serializer_class = NotebookSerializer - permission_classes = [IsAuthenticated, 
ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] filter_backends = [DjangoFilterBackend] filterset_fields = ["short_id"] # TODO: Remove this once we have released notebooks @@ -291,7 +309,16 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query presence_match_structure = [{"content": [{"type": f"ph-{target}"}]}] id_match_structure = [{"content": [{"attrs": {"sessionRecordingId": match}}]}] elif target == "query": - id_match_structure = [{"attrs": {"query": {"kind": "SavedInsightNode", "shortId": match}}}] + id_match_structure = [ + { + "attrs": { + "query": { + "kind": "SavedInsightNode", + "shortId": match, + } + } + } + ] if match == "true" or match is None: queryset = queryset.filter(content__content__contains=presence_match_structure) @@ -318,6 +345,10 @@ def activity(self, request: request.Request, **kwargs): page = int(request.query_params.get("page", "1")) activity_page = load_activity( - scope="Notebook", team_id=self.team_id, item_id=notebook.id, limit=limit, page=page + scope="Notebook", + team_id=self.team_id, + item_id=notebook.id, + limit=limit, + page=page, ) return activity_page_response(activity_page, limit, page, request) diff --git a/posthog/api/organization.py b/posthog/api/organization.py index f8d5de45138f6..b0de79ff52297 100644 --- a/posthog/api/organization.py +++ b/posthog/api/organization.py @@ -115,7 +115,8 @@ def get_membership_level(self, organization: Organization) -> Optional[Organizat def get_teams(self, instance: Organization) -> List[Dict[str, Any]]: teams = cast( - List[Dict[str, Any]], TeamBasicSerializer(instance.teams.all(), context=self.context, many=True).data + List[Dict[str, Any]], + TeamBasicSerializer(instance.teams.all(), context=self.context, many=True).data, ) visible_team_ids = set(self.user_permissions.team_ids_visible_for_user) return [team for team in 
teams if team["id"] in visible_team_ids] @@ -141,7 +142,13 @@ def get_permissions(self): if self.request.method == "POST": # Cannot use `OrganizationMemberPermissions` or `OrganizationAdminWritePermissions` # because they require an existing org, unneded anyways because permissions are organization-based - return [permission() for permission in [permissions.IsAuthenticated, PremiumMultiorganizationPermissions]] + return [ + permission() + for permission in [ + permissions.IsAuthenticated, + PremiumMultiorganizationPermissions, + ] + ] return super().get_permissions() def get_queryset(self) -> QuerySet: @@ -170,11 +177,19 @@ def perform_destroy(self, organization: Organization): # Once the organization is deleted, queue deletion of associated data AsyncDeletion.objects.bulk_create( [ - AsyncDeletion(deletion_type=DeletionType.Team, team_id=team_id, key=str(team_id), created_by=user) + AsyncDeletion( + deletion_type=DeletionType.Team, + team_id=team_id, + key=str(team_id), + created_by=user, + ) for team_id in team_ids ], ignore_conflicts=True, ) def get_serializer_context(self) -> Dict[str, Any]: - return {**super().get_serializer_context(), "user_permissions": UserPermissions(cast(User, self.request.user))} + return { + **super().get_serializer_context(), + "user_permissions": UserPermissions(cast(User, self.request.user)), + } diff --git a/posthog/api/organization_domain.py b/posthog/api/organization_domain.py index 50b815f467664..bcf30b4eac798 100644 --- a/posthog/api/organization_domain.py +++ b/posthog/api/organization_domain.py @@ -9,13 +9,15 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.cloud_utils import is_cloud from posthog.models import OrganizationDomain -from posthog.permissions import OrganizationAdminWritePermissions, OrganizationMemberPermissions +from posthog.permissions import ( + OrganizationAdminWritePermissions, + OrganizationMemberPermissions, +) DOMAIN_REGEX = r"^([a-z0-9]+(-[a-z0-9]+)*\.)+[a-z]{2,}$" class 
OrganizationDomainSerializer(serializers.ModelSerializer): - UPDATE_ONLY_WHEN_VERIFIED = ["jit_provisioning_enabled", "sso_enforcement"] class Meta: @@ -74,7 +76,11 @@ def validate(self, attrs: Dict[str, Any]) -> Dict[str, Any]: class OrganizationDomainViewset(StructuredViewSetMixin, ModelViewSet): serializer_class = OrganizationDomainSerializer - permission_classes = [IsAuthenticated, OrganizationMemberPermissions, OrganizationAdminWritePermissions] + permission_classes = [ + IsAuthenticated, + OrganizationMemberPermissions, + OrganizationAdminWritePermissions, + ] queryset = OrganizationDomain.objects.all() def get_queryset(self): diff --git a/posthog/api/organization_invite.py b/posthog/api/organization_invite.py index 68bd6d6424b46..a5728213f85db 100644 --- a/posthog/api/organization_invite.py +++ b/posthog/api/organization_invite.py @@ -1,6 +1,14 @@ from typing import Any, Dict, cast -from rest_framework import exceptions, mixins, request, response, serializers, status, viewsets +from rest_framework import ( + exceptions, + mixins, + request, + response, + serializers, + status, + viewsets, +) from rest_framework.decorators import action from rest_framework.permissions import IsAuthenticated @@ -41,7 +49,8 @@ class Meta: def create(self, validated_data: Dict[str, Any], *args: Any, **kwargs: Any) -> OrganizationInvite: if OrganizationMembership.objects.filter( - organization_id=self.context["organization_id"], user__email=validated_data["target_email"] + organization_id=self.context["organization_id"], + user__email=validated_data["target_email"], ).exists(): raise exceptions.ValidationError("A user with this email address already belongs to the organization.") invite: OrganizationInvite = OrganizationInvite.objects.create( @@ -101,7 +110,9 @@ def bulk(self, request: request.Request, **kwargs) -> response.Response: ) serializer = OrganizationInviteSerializer( - data=data, many=True, context={**self.get_serializer_context(), "bulk_create": True} + data=data, + 
many=True, + context={**self.get_serializer_context(), "bulk_create": True}, ) serializer.is_valid(raise_exception=True) serializer.save() diff --git a/posthog/api/organization_member.py b/posthog/api/organization_member.py index ca24b262a6673..d6d90c1280d06 100644 --- a/posthog/api/organization_member.py +++ b/posthog/api/organization_member.py @@ -26,7 +26,7 @@ def has_object_permission(self, request: Request, view, membership: Organization if request.method in SAFE_METHODS: return True organization = extract_organization(membership) - requesting_membership: OrganizationMembership = OrganizationMembership.objects.get( + requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get( user_id=cast(User, request.user).id, organization=organization, ) @@ -44,7 +44,15 @@ class OrganizationMemberSerializer(serializers.ModelSerializer): class Meta: model = OrganizationMembership - fields = ["id", "user", "level", "joined_at", "updated_at", "is_2fa_enabled", "has_social_auth"] + fields = [ + "id", + "user", + "level", + "joined_at", + "updated_at", + "is_2fa_enabled", + "has_social_auth", + ] read_only_fields = ["id", "joined_at", "updated_at"] def get_is_2fa_enabled(self, instance: OrganizationMembership) -> bool: @@ -58,7 +66,7 @@ def get_has_social_auth(self, instance: OrganizationMembership) -> bool: def update(self, updated_membership, validated_data, **kwargs): updated_membership = cast(OrganizationMembership, updated_membership) raise_errors_on_nested_writes("update", self, validated_data) - requesting_membership: OrganizationMembership = OrganizationMembership.objects.get( + requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get( organization=updated_membership.organization, user=self.context["request"].user, ) @@ -78,7 +86,11 @@ class OrganizationMemberViewSet( viewsets.GenericViewSet, ): serializer_class = OrganizationMemberSerializer - permission_classes = [IsAuthenticated, OrganizationMemberPermissions, 
OrganizationMemberObjectPermissions] + permission_classes = [ + IsAuthenticated, + OrganizationMemberPermissions, + OrganizationMemberObjectPermissions, + ] queryset = ( OrganizationMembership.objects.order_by("user__first_name", "-joined_at") .exclude(user__email__endswith=INTERNAL_BOT_EMAIL_SUFFIX) @@ -87,7 +99,10 @@ class OrganizationMemberViewSet( ) .select_related("user") .prefetch_related( - Prefetch("user__totpdevice_set", queryset=TOTPDevice.objects.filter(name="default")), + Prefetch( + "user__totpdevice_set", + queryset=TOTPDevice.objects.filter(name="default"), + ), Prefetch("user__social_auth", queryset=UserSocialAuth.objects.all()), ) ) diff --git a/posthog/api/person.py b/posthog/api/person.py index 3e2c72063c26c..f77363af23b5b 100644 --- a/posthog/api/person.py +++ b/posthog/api/person.py @@ -32,11 +32,23 @@ from posthog.api.documentation import PersonPropertiesSerializer, extend_schema from posthog.api.routing import StructuredViewSetMixin from posthog.api.utils import format_paginated_url, get_pk_or_uuid, get_target_entity -from posthog.constants import CSV_EXPORT_LIMIT, INSIGHT_FUNNELS, INSIGHT_PATHS, LIMIT, OFFSET, FunnelVizType +from posthog.constants import ( + CSV_EXPORT_LIMIT, + INSIGHT_FUNNELS, + INSIGHT_PATHS, + LIMIT, + OFFSET, + FunnelVizType, +) from posthog.decorators import cached_by_filters from posthog.logging.timing import timed from posthog.models import Cohort, Filter, Person, User, Team -from posthog.models.activity_logging.activity_log import Change, Detail, load_activity, log_activity +from posthog.models.activity_logging.activity_log import ( + Change, + Detail, + load_activity, + log_activity, +) from posthog.models.activity_logging.activity_page import activity_page_response from posthog.models.async_deletion import AsyncDeletion, DeletionType from posthog.models.cohort.util import get_all_cohort_ids_by_person_uuid @@ -46,11 +58,20 @@ from posthog.models.filters.retention_filter import RetentionFilter from 
posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.person.util import delete_person -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission -from posthog.queries.actor_base_query import ActorBaseQuery, get_people, serialize_people +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) +from posthog.queries.actor_base_query import ( + ActorBaseQuery, + get_people, + serialize_people, +) from posthog.queries.funnels import ClickhouseFunnelActors, ClickhouseFunnelTrendsActors from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors -from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors +from posthog.queries.funnels.funnel_unordered_persons import ( + ClickhouseFunnelUnorderedActors, +) from posthog.queries.insight import insight_sync_execute from posthog.queries.paths import PathsActors from posthog.queries.person_query import PersonQuery @@ -61,10 +82,17 @@ from posthog.queries.trends.lifecycle import Lifecycle from posthog.queries.trends.trends_actors import TrendsActors from posthog.queries.util import get_earliest_timestamp -from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) from posthog.settings import EE_AVAILABLE from posthog.tasks.split_person import split_person -from posthog.utils import convert_property_value, format_query_params_absolute_url, is_anonymous_id +from posthog.utils import ( + convert_property_value, + format_query_params_absolute_url, + is_anonymous_id, +) from prometheus_client import Counter from posthog.metrics import LABEL_TEAM_ID @@ -163,7 +191,9 @@ def get_funnel_actor_class(filter: Filter) -> Callable: if filter.correlation_person_entity and EE_AVAILABLE: if EE_AVAILABLE: - from 
ee.clickhouse.queries.funnels.funnel_correlation_persons import FunnelCorrelationActors + from ee.clickhouse.queries.funnels.funnel_correlation_persons import ( + FunnelCorrelationActors, + ) funnel_actor_class = FunnelCorrelationActors else: @@ -192,7 +222,11 @@ class PersonViewSet(StructuredViewSetMixin, viewsets.ModelViewSet): queryset = Person.objects.all() serializer_class = PersonSerializer pagination_class = PersonLimitOffsetPagination - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] throttle_classes = [ClickHouseBurstRateThrottle, PersonsThrottle] lifecycle_class = Lifecycle retention_class = Retention @@ -230,7 +264,11 @@ def get_object(self): description="Filter persons by email (exact match)", examples=[OpenApiExample(name="email", value="test@test.com")], ), - OpenApiParameter("distinct_id", OpenApiTypes.STR, description="Filter list by distinct id."), + OpenApiParameter( + "distinct_id", + OpenApiTypes.STR, + description="Filter list by distinct id.", + ), OpenApiParameter( "search", OpenApiTypes.STR, @@ -276,7 +314,12 @@ def list(self, request: request.Request, *args: Any, **kwargs: Any) -> response. 
) persons = [] for p in actors: - person = Person(uuid=p[0], created_at=p[1], is_identified=p[2], properties=json.loads(p[3])) + person = Person( + uuid=p[0], + created_at=p[1], + is_identified=p[2], + properties=json.loads(p[3]), + ) person._distinct_ids = p[4] persons.append(person) @@ -387,7 +430,12 @@ def values(self, request: request.Request, **kwargs) -> response.Response: for value, count in result: try: # Try loading as json for dicts or arrays - flattened.append({"name": convert_property_value(json.loads(value)), "count": count}) # type: ignore + flattened.append( + { + "name": convert_property_value(json.loads(value)), # type: ignore + "count": count, + } + ) except json.decoder.JSONDecodeError: flattened.append({"name": convert_property_value(value), "count": count}) return response.Response(flattened) @@ -403,7 +451,12 @@ def _get_person_property_values_for_key(self, key, value): except Exception as e: statsd.incr( "get_person_property_values_for_key_error", - tags={"error": str(e), "key": key, "value": value, "team_id": self.team.id}, + tags={ + "error": str(e), + "key": key, + "value": value, + "team_id": self.team.id, + }, ) raise e @@ -425,7 +478,13 @@ def split(self, request: request.Request, pk=None, **kwargs) -> response.Respons activity="split_person", detail=Detail( name=str(person.uuid), - changes=[Change(type="Person", action="split", after={"distinct_ids": distinct_ids})], + changes=[ + Change( + type="Person", + action="split", + after={"distinct_ids": distinct_ids}, + ) + ], ), ) @@ -433,20 +492,40 @@ def split(self, request: request.Request, pk=None, **kwargs) -> response.Respons @extend_schema( parameters=[ - OpenApiParameter("key", OpenApiTypes.STR, description="Specify the property key", required=True), - OpenApiParameter("value", OpenApiTypes.ANY, description="Specify the property value", required=True), + OpenApiParameter( + "key", + OpenApiTypes.STR, + description="Specify the property key", + required=True, + ), + OpenApiParameter( 
+ "value", + OpenApiTypes.ANY, + description="Specify the property value", + required=True, + ), ] ) @action(methods=["POST"], detail=True) def update_property(self, request: request.Request, pk=None, **kwargs) -> response.Response: if request.data.get("value") is None: return Response( - {"attr": "value", "code": "This field is required.", "detail": "required", "type": "validation_error"}, + { + "attr": "value", + "code": "This field is required.", + "detail": "required", + "type": "validation_error", + }, status=400, ) if request.data.get("key") is None: return Response( - {"attr": "key", "code": "This field is required.", "detail": "required", "type": "validation_error"}, + { + "attr": "key", + "code": "This field is required.", + "detail": "required", + "type": "validation_error", + }, status=400, ) self._set_properties({request.data["key"]: request.data["value"]}, request.user) @@ -455,7 +534,10 @@ def update_property(self, request: request.Request, pk=None, **kwargs) -> respon @extend_schema( parameters=[ OpenApiParameter( - "$unset", OpenApiTypes.STR, description="Specify the property key to delete", required=True + "$unset", + OpenApiTypes.STR, + description="Specify the property key to delete", + required=True, ), ] ) @@ -497,7 +579,10 @@ def cohorts(self, request: request.Request) -> response.Response: team = cast(User, request.user).team if not team: return response.Response( - {"message": "Could not retrieve team", "detail": "Could not validate team associated with user"}, + { + "message": "Could not retrieve team", + "detail": "Could not validate team associated with user", + }, status=400, ) @@ -525,7 +610,13 @@ def activity(self, request: request.Request, pk=None, **kwargs): person = self.get_object() item_id = person.pk - activity_page = load_activity(scope="Person", team_id=self.team_id, item_id=item_id, limit=limit, page=page) + activity_page = load_activity( + scope="Person", + team_id=self.team_id, + item_id=item_id, + limit=limit, + page=page, 
+ ) return activity_page_response(activity_page, limit, page, request) def update(self, request, *args, **kwargs): @@ -619,7 +710,14 @@ def calculate_funnel_persons( next_url = paginated_result(request, raw_count, filter.offset, filter.limit) # cached_function expects a dict with the key result - return {"result": (serialized_actors, next_url, initial_url, raw_count - len(serialized_actors))} + return { + "result": ( + serialized_actors, + next_url, + initial_url, + raw_count - len(serialized_actors), + ) + } @action(methods=["GET", "POST"], detail=False) def path(self, request: request.Request, **kwargs) -> response.Response: @@ -647,7 +745,14 @@ def calculate_path_persons( initial_url = format_query_params_absolute_url(request, 0) # cached_function expects a dict with the key result - return {"result": (serialized_actors, next_url, initial_url, raw_count - len(serialized_actors))} + return { + "result": ( + serialized_actors, + next_url, + initial_url, + raw_count - len(serialized_actors), + ) + } @action(methods=["GET"], detail=False) def trends(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: @@ -669,7 +774,14 @@ def calculate_trends_persons( initial_url = format_query_params_absolute_url(request, 0) # cached_function expects a dict with the key result - return {"result": (serialized_actors, next_url, initial_url, raw_count - len(serialized_actors))} + return { + "result": ( + serialized_actors, + next_url, + initial_url, + raw_count - len(serialized_actors), + ) + } @action(methods=["GET"], detail=True) def properties_timeline(self, request: request.Request, *args: Any, **kwargs: Any) -> Response: @@ -688,19 +800,30 @@ def lifecycle(self, request: request.Request) -> response.Response: team = cast(User, request.user).team if not team: return response.Response( - {"message": "Could not retrieve team", "detail": "Could not validate team associated with user"}, + { + "message": "Could not retrieve team", + "detail": "Could not validate team 
associated with user", + }, status=400, ) target_date = request.GET.get("target_date", None) if target_date is None: return response.Response( - {"message": "Missing parameter", "detail": "Must include specified date"}, status=400 + { + "message": "Missing parameter", + "detail": "Must include specified date", + }, + status=400, ) lifecycle_type = request.GET.get("lifecycle_type", None) if lifecycle_type is None: return response.Response( - {"message": "Missing parameter", "detail": "Must include lifecycle type"}, status=400 + { + "message": "Missing parameter", + "detail": "Must include lifecycle type", + }, + status=400, ) filter = LifecycleFilter(request=request, data=request.GET.dict(), team=self.team) @@ -718,7 +841,10 @@ def retention(self, request: request.Request) -> response.Response: team = cast(User, request.user).team if not team: return response.Response( - {"message": "Could not retrieve team", "detail": "Could not validate team associated with user"}, + { + "message": "Could not retrieve team", + "detail": "Could not validate team associated with user", + }, status=400, ) filter = RetentionFilter(request=request, team=team) @@ -729,14 +855,23 @@ def retention(self, request: request.Request) -> response.Response: next_url = paginated_result(request, raw_count, filter.offset, filter.limit) - return response.Response({"result": people, "next": next_url, "missing_persons": raw_count - len(people)}) + return response.Response( + { + "result": people, + "next": next_url, + "missing_persons": raw_count - len(people), + } + ) @action(methods=["GET"], detail=False) def stickiness(self, request: request.Request) -> response.Response: team = cast(User, request.user).team if not team: return response.Response( - {"message": "Could not retrieve team", "detail": "Could not validate team associated with user"}, + { + "message": "Could not retrieve team", + "detail": "Could not validate team associated with user", + }, status=400, ) filter = 
StickinessFilter(request=request, team=team, get_earliest_timestamp=get_earliest_timestamp) @@ -791,9 +926,19 @@ def prepare_actor_query_filter(filter: T) -> T: new_group = { "type": "OR", "values": [ - {"key": "email", "type": "person", "value": search, "operator": "icontains"}, + { + "key": "email", + "type": "person", + "value": search, + "operator": "icontains", + }, {"key": "name", "type": "person", "value": search, "operator": "icontains"}, - {"key": "distinct_id", "type": "event", "value": search, "operator": "icontains"}, + { + "key": "distinct_id", + "type": "event", + "value": search, + "operator": "icontains", + }, ] + group_properties_filter_group, } diff --git a/posthog/api/plugin.py b/posthog/api/plugin.py index 4513ccc58996c..b39c18a67346a 100644 --- a/posthog/api/plugin.py +++ b/posthog/api/plugin.py @@ -32,7 +32,11 @@ from posthog.models.activity_logging.activity_page import activity_page_response from posthog.models.activity_logging.serializers import ActivityLogSerializer from posthog.models.organization import Organization -from posthog.models.plugin import PluginSourceFile, update_validated_data_from_url, validate_plugin_job_payload +from posthog.models.plugin import ( + PluginSourceFile, + update_validated_data_from_url, + validate_plugin_job_payload, +) from posthog.models.utils import UUIDT, generate_random_token from posthog.permissions import ( OrganizationMemberPermissions, @@ -66,7 +70,10 @@ def get_plugin_config_changes(old_config: Dict[str, Any], new_config: Dict[str, for i, change in enumerate(config_changes): if change.field in secret_fields: config_changes[i] = Change( - type="PluginConfig", action=change.action, before=SECRET_FIELD_VALUE, after=SECRET_FIELD_VALUE + type="PluginConfig", + action=change.action, + before=SECRET_FIELD_VALUE, + after=SECRET_FIELD_VALUE, ) return config_changes @@ -87,10 +94,16 @@ def log_enabled_change_activity(new_plugin_config: PluginConfig, old_enabled: bo def log_config_update_activity( - 
new_plugin_config: PluginConfig, old_config: Dict[str, Any], secret_fields: Set[str], old_enabled: bool, user: User + new_plugin_config: PluginConfig, + old_config: Dict[str, Any], + secret_fields: Set[str], + old_enabled: bool, + user: User, ): config_changes = get_plugin_config_changes( - old_config=old_config, new_config=new_plugin_config.config, secret_fields=secret_fields + old_config=old_config, + new_config=new_plugin_config.config, + secret_fields=secret_fields, ) if len(config_changes) > 0: @@ -113,7 +126,12 @@ def _update_plugin_attachment(plugin_config: PluginConfig, key: str, file: Optio plugin_attachment = PluginAttachment.objects.get(team=plugin_config.team, plugin_config=plugin_config, key=key) if file: activity = "attachment_updated" - change = Change(type="PluginConfig", action="changed", before=plugin_attachment.file_name, after=file.name) + change = Change( + type="PluginConfig", + action="changed", + before=plugin_attachment.file_name, + after=file.name, + ) plugin_attachment.content_type = file.content_type plugin_attachment.file_name = file.name @@ -124,7 +142,12 @@ def _update_plugin_attachment(plugin_config: PluginConfig, key: str, file: Optio plugin_attachment.delete() activity = "attachment_deleted" - change = Change(type="PluginConfig", action="deleted", before=plugin_attachment.file_name, after=None) + change = Change( + type="PluginConfig", + action="deleted", + before=plugin_attachment.file_name, + after=None, + ) except ObjectDoesNotExist: if file: PluginAttachment.objects.create( @@ -312,7 +335,8 @@ def check_for_updates(self, request: request.Request, **kwargs): # use update to not trigger the post_save signal and avoid telling the plugin server to reload vms Plugin.objects.filter(id=plugin.id).update( - latest_tag=latest_url.get("tag", latest_url.get("version", None)), latest_tag_checked_at=now() + latest_tag=latest_url.get("tag", latest_url.get("version", None)), + latest_tag_checked_at=now(), ) plugin.refresh_from_db() @@ -379,7 
+403,10 @@ def update_source(self, request: request.Request, **kwargs): def upgrade(self, request: request.Request, **kwargs): plugin = self.get_plugin_with_permissions(reason="upgrading") serializer = PluginSerializer(plugin, context=self.get_serializer_context()) - if plugin.plugin_type not in (Plugin.PluginType.SOURCE, Plugin.PluginType.LOCAL): + if plugin.plugin_type not in ( + Plugin.PluginType.SOURCE, + Plugin.PluginType.LOCAL, + ): validated_data: Dict[str, Any] = {} plugin_json = update_validated_data_from_url(validated_data, plugin.url) with transaction.atomic(): @@ -430,7 +457,12 @@ def all_activity(self, request: request.Request, **kwargs): limit = int(request.query_params.get("limit", "10")) page = int(request.query_params.get("page", "1")) - activity_page = load_all_activity(scope_list=["Plugin", "PluginConfig"], team_id=request.user.team.id, limit=limit, page=page) # type: ignore + activity_page = load_all_activity( + scope_list=["Plugin", "PluginConfig"], + team_id=request.user.team.id, # type: ignore + limit=limit, + page=page, + ) return activity_page_response(activity_page, limit, page, request) @@ -472,7 +504,13 @@ class Meta: "delivery_rate_24h", "created_at", ] - read_only_fields = ["id", "team_id", "plugin_info", "delivery_rate_24h", "created_at"] + read_only_fields = [ + "id", + "team_id", + "plugin_info", + "delivery_rate_24h", + "created_at", + ] def get_config(self, plugin_config: PluginConfig): attachments = PluginAttachment.objects.filter(plugin_config=plugin_config).only( @@ -547,7 +585,13 @@ def create(self, validated_data: Dict, *args: Any, **kwargs: Any) -> PluginConfi _update_plugin_attachments(self.context["request"], plugin_config) return plugin_config - def update(self, plugin_config: PluginConfig, validated_data: Dict, *args: Any, **kwargs: Any) -> PluginConfig: # type: ignore + def update( # type: ignore + self, + plugin_config: PluginConfig, + validated_data: Dict, + *args: Any, + **kwargs: Any, + ) -> PluginConfig: 
_fix_formdata_config_json(self.context["request"], validated_data) validated_data.pop("plugin", None) @@ -631,7 +675,15 @@ def rearrange(self, request: request.Request, **kwargs): activity="order_changed", detail=Detail( name=plugin_config.plugin.name, - changes=[Change(type="Plugin", before=old_order, after=order, action="changed", field="order")], + changes=[ + Change( + type="Plugin", + before=old_order, + after=order, + action="changed", + field="order", + ) + ], ), ) diff --git a/posthog/api/plugin_log_entry.py b/posthog/api/plugin_log_entry.py index 4cf4fe08aeb69..4d8353dd2adc3 100644 --- a/posthog/api/plugin_log_entry.py +++ b/posthog/api/plugin_log_entry.py @@ -7,8 +7,15 @@ from posthog.api.plugin import PluginOwnershipPermission, PluginsAccessLevelPermission from posthog.api.routing import StructuredViewSetMixin -from posthog.models.plugin import PluginLogEntry, PluginLogEntryType, fetch_plugin_log_entries -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.models.plugin import ( + PluginLogEntry, + PluginLogEntryType, + fetch_plugin_log_entries, +) +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) class PluginLogEntrySerializer(DataclassSerializer): diff --git a/posthog/api/prompt.py b/posthog/api/prompt.py index 71ea5bebcebbb..b50d56103276a 100644 --- a/posthog/api/prompt.py +++ b/posthog/api/prompt.py @@ -46,7 +46,16 @@ class PromptSequenceSerializer(serializers.ModelSerializer): class Meta: model = PromptSequence - fields = ["key", "path_match", "path_exclude", "requires_opt_in", "type", "status", "prompts", "autorun"] + fields = [ + "key", + "path_match", + "path_exclude", + "requires_opt_in", + "type", + "status", + "prompts", + "autorun", + ] class UserPromptStateSerializer(serializers.ModelSerializer): @@ -132,7 +141,10 @@ def my_prompts(self, request: request.Request, **kwargs): sequence = state.sequence must_have_completed = 
sequence.must_have_completed.all() if len(must_have_completed) > 0: - current_state = next((s for s in up_to_date_states if s.sequence in must_have_completed), None) + current_state = next( + (s for s in up_to_date_states if s.sequence in must_have_completed), + None, + ) if not current_state or (current_state and not current_state.completed): continue my_prompts["state"][sequence.key] = UserPromptStateSerializer(state).data @@ -162,7 +174,15 @@ class WebhookSequenceSerializer(serializers.ModelSerializer): class Meta: model = PromptSequence - fields = ["key", "path_match", "path_exclude", "type", "status", "requires_opt_in", "autorun"] + fields = [ + "key", + "path_match", + "path_exclude", + "type", + "status", + "requires_opt_in", + "autorun", + ] @app.task(ignore_result=True) @@ -177,7 +197,6 @@ def trigger_prompt_for_user(email: str, sequence_id: int): @csrf_exempt def prompt_webhook(request: request.Request): - if request.method == "POST": data = json.loads(request.body) else: diff --git a/posthog/api/property_definition.py b/posthog/api/property_definition.py index 8481e6ae4cbf5..6da00daecf445 100644 --- a/posthog/api/property_definition.py +++ b/posthog/api/property_definition.py @@ -4,7 +4,15 @@ from django.db import connection from django.db.models import Prefetch -from rest_framework import mixins, permissions, serializers, viewsets, status, request, response +from rest_framework import ( + mixins, + permissions, + serializers, + viewsets, + status, + request, + response, +) from rest_framework.decorators import action from rest_framework.exceptions import ValidationError from rest_framework.pagination import LimitOffsetPagination @@ -19,7 +27,10 @@ from posthog.models import PropertyDefinition, TaggedItem, User, EventProperty from posthog.models.activity_logging.activity_log import log_activity, Detail from posthog.models.utils import UUIDT -from posthog.permissions import OrganizationMemberPermissions, TeamMemberAccessPermission +from 
posthog.permissions import ( + OrganizationMemberPermissions, + TeamMemberAccessPermission, +) class SeenTogetherQuerySerializer(serializers.Serializer): @@ -140,7 +151,8 @@ def with_properties_to_filter(self, properties_to_filter: Optional[str]) -> "Que def with_is_numerical_flag(self, is_numerical: Optional[str]) -> "QueryContext": if is_numerical: return dataclasses.replace( - self, numerical_filter="AND is_numerical = true AND name NOT IN ('distinct_id', 'timestamp')" + self, + numerical_filter="AND is_numerical = true AND name NOT IN ('distinct_id', 'timestamp')", ) else: return self @@ -164,19 +176,32 @@ def with_feature_flags(self, is_feature_flag: Optional[bool]) -> "QueryContext": def with_type_filter(self, type: str, group_type_index: Optional[int]): if type == "event": return dataclasses.replace( - self, params={**self.params, "type": PropertyDefinition.Type.EVENT, "group_type_index": -1} + self, + params={ + **self.params, + "type": PropertyDefinition.Type.EVENT, + "group_type_index": -1, + }, ) elif type == "person": return dataclasses.replace( self, should_join_event_property=False, - params={**self.params, "type": PropertyDefinition.Type.PERSON, "group_type_index": -1}, + params={ + **self.params, + "type": PropertyDefinition.Type.PERSON, + "group_type_index": -1, + }, ) elif type == "group": return dataclasses.replace( self, should_join_event_property=False, - params={**self.params, "type": PropertyDefinition.Type.GROUP, "group_type_index": group_type_index}, + params={ + **self.params, + "type": PropertyDefinition.Type.GROUP, + "group_type_index": group_type_index, + }, ) def with_event_property_filter( @@ -207,7 +232,9 @@ def with_event_property_filter( def with_search(self, search_query: str, search_kwargs: Dict) -> "QueryContext": return dataclasses.replace( - self, search_query=search_query, params={**self.params, "team_id": self.team_id, **search_kwargs} + self, + search_query=search_query, + params={**self.params, "team_id": self.team_id, 
**search_kwargs}, ) def with_excluded_properties(self, excluded_properties: Optional[str], type: str) -> "QueryContext": @@ -215,7 +242,10 @@ def with_excluded_properties(self, excluded_properties: Optional[str], type: str excluded_properties = json.loads(excluded_properties) excluded_list = tuple( - set.union(set(excluded_properties or []), EVENTS_HIDDEN_PROPERTY_DEFINITIONS if type == "event" else []) + set.union( + set(excluded_properties or []), + EVENTS_HIDDEN_PROPERTY_DEFINITIONS if type == "event" else [], + ) ) return dataclasses.replace( self, @@ -422,7 +452,11 @@ class PropertyDefinitionViewSet( viewsets.GenericViewSet, ): serializer_class = PropertyDefinitionSerializer - permission_classes = [permissions.IsAuthenticated, OrganizationMemberPermissions, TeamMemberAccessPermission] + permission_classes = [ + permissions.IsAuthenticated, + OrganizationMemberPermissions, + TeamMemberAccessPermission, + ] lookup_field = "id" filter_backends = [TermSearchFilterBackend] ordering = "name" @@ -433,10 +467,16 @@ def get_queryset(self): queryset = PropertyDefinition.objects property_definition_fields = ", ".join( - [f'posthog_propertydefinition."{f.column}"' for f in PropertyDefinition._meta.get_fields() if hasattr(f, "column")] # type: ignore + [ + f'posthog_propertydefinition."{f.column}"' # type: ignore + for f in PropertyDefinition._meta.get_fields() + if hasattr(f, "column") + ] ) - use_enterprise_taxonomy = self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY) # type: ignore + use_enterprise_taxonomy = self.request.user.organization.is_feature_available( # type: ignore + AvailableFeature.INGESTION_TAXONOMY + ) order_by_verified = False if use_enterprise_taxonomy: try: @@ -453,7 +493,9 @@ def get_queryset(self): queryset = EnterprisePropertyDefinition.objects.prefetch_related( Prefetch( - "tagged_items", queryset=TaggedItem.objects.select_related("tag"), to_attr="prefetched_tags" + "tagged_items", + 
queryset=TaggedItem.objects.select_related("tag"), + to_attr="prefetched_tags", ) ) order_by_verified = True @@ -483,7 +525,10 @@ def get_queryset(self): limit=limit, offset=offset, ) - .with_type_filter(query.validated_data.get("type"), query.validated_data.get("group_type_index")) + .with_type_filter( + query.validated_data.get("type"), + query.validated_data.get("group_type_index"), + ) .with_properties_to_filter(query.validated_data.get("properties")) .with_is_numerical_flag(query.validated_data.get("is_numerical")) .with_feature_flags(query.validated_data.get("is_feature_flag")) @@ -493,7 +538,8 @@ def get_queryset(self): ) .with_search(search_query, search_kwargs) .with_excluded_properties( - query.validated_data.get("excluded_properties"), type=query.validated_data.get("type") + query.validated_data.get("excluded_properties"), + type=query.validated_data.get("type"), ) ) @@ -509,7 +555,9 @@ def get_serializer_class(self) -> Type[serializers.ModelSerializer]: serializer_class = self.serializer_class if self.request.user.organization.is_feature_available(AvailableFeature.INGESTION_TAXONOMY): # type: ignore try: - from ee.api.ee_property_definition import EnterprisePropertyDefinitionSerializer + from ee.api.ee_property_definition import ( + EnterprisePropertyDefinitionSerializer, + ) except ImportError: pass else: @@ -581,7 +629,9 @@ def destroy(self, request: request.Request, *args: Any, **kwargs: Any) -> respon scope="PropertyDefinition", activity="deleted", detail=Detail( - name=cast(str, instance.name), type=PropertyDefinition.Type(instance.type).label, changes=None + name=cast(str, instance.name), + type=PropertyDefinition.Type(instance.type).label, + changes=None, ), ) return response.Response(status=status.HTTP_204_NO_CONTENT) diff --git a/posthog/api/query.py b/posthog/api/query.py index 25b64896a0809..5c1773e671a20 100644 --- a/posthog/api/query.py +++ b/posthog/api/query.py @@ -29,10 +29,20 @@ from posthog.hogql_queries.query_runner import 
get_query_runner from posthog.models import Team from posthog.models.user import User -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission -from posthog.queries.time_to_see_data.serializers import SessionEventsQuerySerializer, SessionsQuerySerializer +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) +from posthog.queries.time_to_see_data.serializers import ( + SessionEventsQuerySerializer, + SessionsQuerySerializer, +) from posthog.queries.time_to_see_data.sessions import get_session_events, get_sessions -from posthog.rate_limit import AIBurstRateThrottle, AISustainedRateThrottle, TeamRateThrottle +from posthog.rate_limit import ( + AIBurstRateThrottle, + AISustainedRateThrottle, + TeamRateThrottle, +) from posthog.schema import HogQLMetadata from posthog.utils import refresh_requested_by_client @@ -80,7 +90,11 @@ def parse(self, stream, media_type=None, parser_context=None): class QueryViewSet(StructuredViewSetMixin, viewsets.ViewSet): - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] parser_classes = (QuerySchemaParser,) @@ -180,7 +194,8 @@ def parsing_error(ex): raise ValidationError(ex) query = json.loads( - query_source, parse_constant=lambda x: parsing_error(f"Unsupported constant found in JSON: {x}") + query_source, + parse_constant=lambda x: parsing_error(f"Unsupported constant found in JSON: {x}"), ) except (json.JSONDecodeError, UnicodeDecodeError) as error_main: raise ValidationError("Invalid JSON: %s" % (str(error_main))) @@ -211,7 +226,10 @@ def _unwrap_pydantic_dict(response: Any) -> Dict: def process_query( - team: Team, query_json: Dict, in_export_context: Optional[bool] = False, request: Optional[Request] = None + team: Team, + query_json: Dict, + in_export_context: 
Optional[bool] = False, + request: Optional[Request] = None, ) -> Dict: # query_json has been parsed by QuerySchemaParser # it _should_ be impossible to end up in here with a "bad" query diff --git a/posthog/api/routing.py b/posthog/api/routing.py index f5e56b5711ed6..0f61d093a03a7 100644 --- a/posthog/api/routing.py +++ b/posthog/api/routing.py @@ -125,7 +125,9 @@ def parents_query_dict(self) -> Dict[str, Any]: # drf-extensions nested parameters are prefixed if kwarg_name.startswith(extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX): query_lookup = kwarg_name.replace( - extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX, "", 1 + extensions_api_settings.DEFAULT_PARENT_LOOKUP_KWARG_NAME_PREFIX, + "", + 1, ) query_value = kwarg_value if query_value == "@current": diff --git a/posthog/api/shared.py b/posthog/api/shared.py index 43813eacb2ac2..1a497278b68d0 100644 --- a/posthog/api/shared.py +++ b/posthog/api/shared.py @@ -13,7 +13,14 @@ class UserBasicSerializer(serializers.ModelSerializer): class Meta: model = User - fields = ["id", "uuid", "distinct_id", "first_name", "email", "is_email_verified"] + fields = [ + "id", + "uuid", + "distinct_id", + "first_name", + "email", + "is_email_verified", + ] class TeamBasicSerializer(serializers.ModelSerializer): diff --git a/posthog/api/sharing.py b/posthog/api/sharing.py index ccbc8b5f68794..babc3a3ecfad4 100644 --- a/posthog/api/sharing.py +++ b/posthog/api/sharing.py @@ -18,11 +18,18 @@ from posthog.models import SharingConfiguration, Team from posthog.models.activity_logging.activity_log import log_activity, Detail, Change from posthog.models.dashboard import Dashboard -from posthog.models.exported_asset import ExportedAsset, asset_for_token, get_content_response +from posthog.models.exported_asset import ( + ExportedAsset, + asset_for_token, + get_content_response, +) from posthog.models.insight import Insight from posthog.models import SessionRecording from posthog.models.user import User 
-from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.session_recordings.session_recording_api import SessionRecordingSerializer from posthog.user_permissions import UserPermissions from posthog.utils import render_template @@ -76,7 +83,11 @@ class Meta: class SharingConfigurationViewSet(StructuredViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] pagination_class = None queryset = SharingConfiguration.objects.select_related("dashboard", "insight", "recording") serializer_class = SharingConfigurationSerializer @@ -119,7 +130,12 @@ def _get_sharing_configuration(self, context: Dict[str, Any]): insight = context.get("insight") recording = context.get("recording") - config_kwargs = dict(team_id=self.team_id, insight=insight, dashboard=dashboard, recording=recording) + config_kwargs = dict( + team_id=self.team_id, + insight=insight, + dashboard=dashboard, + recording=recording, + ) try: instance = SharingConfiguration.objects.get(**config_kwargs) diff --git a/posthog/api/signup.py b/posthog/api/signup.py index ef19bae273ae4..511f425b71000 100644 --- a/posthog/api/signup.py +++ b/posthog/api/signup.py @@ -20,8 +20,18 @@ from posthog.demo.matrix import MatrixManager from posthog.demo.products.hedgebox import HedgeboxMatrix from posthog.email import is_email_available -from posthog.event_usage import alias_invite_id, report_user_joined_organization, report_user_signed_up -from posthog.models import Organization, OrganizationDomain, OrganizationInvite, Team, User +from posthog.event_usage import ( + alias_invite_id, + report_user_joined_organization, + report_user_signed_up, 
+) +from posthog.models import ( + Organization, + OrganizationDomain, + OrganizationInvite, + Team, + User, +) from posthog.permissions import CanCreateOrg from posthog.utils import get_can_create_org @@ -96,7 +106,8 @@ def create(self, validated_data, **kwargs): ) except IntegrityError: raise exceptions.ValidationError( - {"email": "There is already an account with this email address."}, code="unique" + {"email": "There is already an account with this email address."}, + code="unique", ) user = self._user @@ -128,11 +139,17 @@ def enter_demo(self, validated_data) -> User: matrix = HedgeboxMatrix() manager = MatrixManager(matrix, use_pre_save=True) with transaction.atomic(): - self._organization, self._team, self._user = manager.ensure_account_and_save( - email, first_name, organization_name, is_staff=is_staff - ) - - login(self.context["request"], self._user, backend="django.contrib.auth.backends.ModelBackend") + ( + self._organization, + self._team, + self._user, + ) = manager.ensure_account_and_save(email, first_name, organization_name, is_staff=is_staff) + + login( + self.context["request"], + self._user, + backend="django.contrib.auth.backends.ModelBackend", + ) return self._user def create_team(self, organization: Organization, user: User) -> Team: @@ -261,7 +278,11 @@ def get(self, request, *args, **kwargs): user = request.user if request.user.is_authenticated else None - invite.validate(user=user, invite_email=invite.target_email, request_path=f"/signup/{invite_id}") + invite.validate( + user=user, + invite_email=invite.target_email, + request_path=f"/signup/{invite_id}", + ) return response.Response( { @@ -312,7 +333,12 @@ def create(self, validated_data, **kwargs): serializer.is_valid(raise_exception=True) user = serializer.save() - logger.info(f"social_create_user_signup", full_name_len=len(first_name), email_len=len(email), user=user.id) + logger.info( + f"social_create_user_signup", + full_name_len=len(first_name), + email_len=len(email), + 
user=user.id, + ) return {"continue_url": reverse("social:complete", args=[request.session["backend"]])} @@ -353,7 +379,11 @@ def process_social_invite_signup(strategy: DjangoStrategy, invite_id: str, email try: invite = TeamInviteSurrogate(invite_id) except Team.DoesNotExist: - raise ValidationError("Team does not exist", code="invalid_invite", params={"source": "social_create_user"}) + raise ValidationError( + "Team does not exist", + code="invalid_invite", + params={"source": "social_create_user"}, + ) invite.validate(user=None, email=email) @@ -378,7 +408,10 @@ def process_social_domain_jit_provisioning_signup( logger.info(f"process_social_domain_jit_provisioning_signup", domain=domain) domain_instance = OrganizationDomain.objects.get(domain=domain) except OrganizationDomain.DoesNotExist: - logger.info(f"process_social_domain_jit_provisioning_signup_domain_does_not_exist", domain=domain) + logger.info( + f"process_social_domain_jit_provisioning_signup_domain_does_not_exist", + domain=domain, + ) return user else: logger.info( @@ -416,7 +449,13 @@ def process_social_domain_jit_provisioning_signup( @partial def social_create_user( - strategy: DjangoStrategy, details, backend, request, user: Union[User, None] = None, *args, **kwargs + strategy: DjangoStrategy, + details, + backend, + request, + user: Union[User, None] = None, + *args, + **kwargs, ): if user: logger.info(f"social_create_user_is_not_new") @@ -443,7 +482,8 @@ def social_create_user( if not email or not full_name: missing_attr = "email" if not email else "name" raise ValidationError( - {missing_attr: "This field is required and was not provided by the IdP."}, code="required" + {missing_attr: "This field is required and was not provided by the IdP."}, + code="required", ) logger.info(f"social_create_user", full_name_len=len(full_name), email_len=len(email)) @@ -466,7 +506,11 @@ def social_create_user( from_invite = True # jit_provisioning means they're definitely not organization_first_user if not 
user: - logger.info(f"social_create_user_jit_failed", full_name_len=len(full_name), email_len=len(email)) + logger.info( + f"social_create_user_jit_failed", + full_name_len=len(full_name), + email_len=len(email), + ) if not get_can_create_org(request.user): if email and OrganizationDomain.objects.get_verified_for_email_address(email): @@ -483,7 +527,11 @@ def social_create_user( "email": email or "", } query_params_string = urlencode(query_params) - logger.info("social_create_user_confirm_organization", full_name_len=len(full_name), email_len=len(email)) + logger.info( + "social_create_user_confirm_organization", + full_name_len=len(full_name), + email_len=len(email), + ) return redirect(f"/organization/confirm-creation?{query_params_string}") diff --git a/posthog/api/survey.py b/posthog/api/survey.py index 81523782a86a2..a5105e9d5908c 100644 --- a/posthog/api/survey.py +++ b/posthog/api/survey.py @@ -18,7 +18,10 @@ from posthog.models.feature_flag.feature_flag import FeatureFlag from posthog.models.team.team import Team -from posthog.permissions import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from django.utils.text import slugify from django.views.decorators.csrf import csrf_exempt @@ -98,11 +101,11 @@ def validate_appearance(self, value): thank_you_message = value.get("thankYouMessageHeader") if thank_you_message and nh3.is_html(thank_you_message): - value["thankYouMessageHeader"] = nh3.clean(thank_you_message) + value["thankYouMessageHeader"] = nh3_clean_with_whitelist(thank_you_message) thank_you_description = value.get("thankYouMessageDescription") if thank_you_description and nh3.is_html(thank_you_description): - value["thankYouMessageDescription"] = nh3.clean(thank_you_description) + value["thankYouMessageDescription"] = nh3_clean_with_whitelist(thank_you_description) return value @@ -128,9 +131,9 @@ def 
validate_questions(self, value): description = raw_question.get("description") if nh3.is_html(question_text): - cleaned_question["question"] = nh3.clean(question_text) + cleaned_question["question"] = nh3_clean_with_whitelist(question_text) if description and nh3.is_html(description): - cleaned_question["description"] = nh3.clean(description) + cleaned_question["description"] = nh3_clean_with_whitelist(description) cleaned_questions.append(cleaned_question) @@ -342,3 +345,150 @@ def surveys(request: Request): ).data return cors_response(request, JsonResponse({"surveys": surveys})) + + +def nh3_clean_with_whitelist(to_clean: str): + return nh3.clean( + to_clean, + link_rel="noopener", + tags={ + "a", + "abbr", + "acronym", + "area", + "article", + "aside", + "b", + "bdi", + "bdo", + "blockquote", + "br", + "caption", + "center", + "cite", + "code", + "col", + "colgroup", + "data", + "dd", + "del", + "details", + "dfn", + "div", + "dl", + "dt", + "em", + "figcaption", + "figure", + "footer", + "h1", + "h2", + "h3", + "h4", + "h5", + "h6", + "header", + "hgroup", + "hr", + "i", + "img", + "ins", + "kbd", + "li", + "map", + "mark", + "nav", + "ol", + "p", + "pre", + "q", + "rp", + "rt", + "rtc", + "ruby", + "s", + "samp", + "small", + "span", + "strike", + "strong", + "sub", + "summary", + "sup", + "table", + "tbody", + "td", + "th", + "thead", + "time", + "tr", + "tt", + "u", + "ul", + "var", + "wbr", + }, + attributes={ + "*": {"style", "lang", "title", "width", "height"}, + # below are mostly defaults to ammonia, but we need to add them explicitly + # because this python binding doesn't allow additive whitelisting + "a": {"href", "hreflang"}, + "bdo": {"dir"}, + "blockquote": {"cite"}, + "col": {"align", "char", "charoff", "span"}, + "colgroup": {"align", "char", "charoff", "span"}, + "del": {"cite", "datetime"}, + "hr": {"align", "size", "width"}, + "img": {"align", "alt", "height", "src", "width"}, + "ins": {"cite", "datetime"}, + "ol": {"start", "type"}, + "q": 
{"cite"}, + "table": { + "align", + "bgcolor", + "border", + "cellpadding", + "cellspacing", + "frame", + "rules", + "summary", + "width", + }, + "tbody": {"align", "char", "charoff", "valign"}, + "td": { + "abbr", + "align", + "axis", + "bgcolor", + "char", + "charoff", + "colspan", + "headers", + "height", + "nowrap", + "rowspan", + "scope", + "valign", + "width", + }, + "tfoot": {"align", "char", "charoff", "valign"}, + "th": { + "abbr", + "align", + "axis", + "bgcolor", + "char", + "charoff", + "colspan", + "headers", + "height", + "nowrap", + "rowspan", + "scope", + "valign", + "width", + }, + "thead": {"align", "char", "charoff", "valign"}, + "tr": {"align", "bgcolor", "char", "charoff", "valign"}, + }, + ) diff --git a/posthog/api/tagged_item.py b/posthog/api/tagged_item.py index eb35c2746f0bb..d756b3411655e 100644 --- a/posthog/api/tagged_item.py +++ b/posthog/api/tagged_item.py @@ -23,7 +23,6 @@ def _is_licensed(self): ) def _attempt_set_tags(self, tags, obj, force_create=False): - if not force_create and not self._is_licensed() and tags is not None: # Silently fail on updating tags so that entire request isn't blocked return @@ -88,7 +87,11 @@ def is_licensed(self): def prefetch_tagged_items_if_available(self, queryset: QuerySet) -> QuerySet: if self.is_licensed(): return queryset.prefetch_related( - Prefetch("tagged_items", queryset=TaggedItem.objects.select_related("tag"), to_attr="prefetched_tags") + Prefetch( + "tagged_items", + queryset=TaggedItem.objects.select_related("tag"), + to_attr="prefetched_tags", + ) ) return queryset diff --git a/posthog/api/team.py b/posthog/api/team.py index 8127313491434..2c076a6a8d8b3 100644 --- a/posthog/api/team.py +++ b/posthog/api/team.py @@ -4,7 +4,14 @@ from django.core.cache import cache from django.shortcuts import get_object_or_404 -from rest_framework import exceptions, permissions, request, response, serializers, viewsets +from rest_framework import ( + exceptions, + permissions, + request, + response, + 
serializers, + viewsets, +) from rest_framework.decorators import action from posthog.api.geoip import get_geoip_properties @@ -16,7 +23,10 @@ from posthog.models.group_type_mapping import GroupTypeMapping from posthog.models.organization import OrganizationMembership from posthog.models.signals import mute_selected_signals -from posthog.models.team.team import groups_on_events_querying_enabled, set_team_in_cache +from posthog.models.team.team import ( + groups_on_events_querying_enabled, + set_team_in_cache, +) from posthog.models.team.util import delete_batch_exports, delete_bulky_postgres_data from posthog.models.utils import generate_random_token_project from posthog.permissions import ( @@ -194,7 +204,7 @@ def validate(self, attrs: Any) -> Any: organization_id = self.instance.organization_id else: organization_id = self.context["view"].organization - org_membership: OrganizationMembership = OrganizationMembership.objects.only("level").get( + org_membership: (OrganizationMembership) = OrganizationMembership.objects.only("level").get( organization_id=organization_id, user=request.user ) if org_membership.level < OrganizationMembership.Level.ADMIN: diff --git a/posthog/api/test/batch_exports/operations.py b/posthog/api/test/batch_exports/operations.py index 90d515130a9c4..6bcf8365a5f59 100644 --- a/posthog/api/test/batch_exports/operations.py +++ b/posthog/api/test/batch_exports/operations.py @@ -3,7 +3,11 @@ def create_batch_export(client: TestClient, team_id: int, batch_export_data: dict): - return client.post(f"/api/projects/{team_id}/batch_exports", batch_export_data, content_type="application/json") + return client.post( + f"/api/projects/{team_id}/batch_exports", + batch_export_data, + content_type="application/json", + ) def create_batch_export_ok(client: TestClient, team_id: int, batch_export_data: dict): @@ -47,7 +51,10 @@ def get_batch_export_ok(client: TestClient, team_id: int, batch_export_id: int): def get_batch_export_runs(client: TestClient, 
team_id: int, batch_export_id: str): - return client.get(f"/api/projects/{team_id}/batch_exports/{batch_export_id}/runs", content_type="application/json") + return client.get( + f"/api/projects/{team_id}/batch_exports/{batch_export_id}/runs", + content_type="application/json", + ) def get_batch_export_runs_ok(client: TestClient, team_id: int, batch_export_id: str): diff --git a/posthog/api/test/batch_exports/test_backfill.py b/posthog/api/test/batch_exports/test_backfill.py index 44f82b988dea6..a0c874717fb46 100644 --- a/posthog/api/test/batch_exports/test_backfill.py +++ b/posthog/api/test/batch_exports/test_backfill.py @@ -50,7 +50,13 @@ def test_batch_export_backfill(client: HttpClient): batch_export = create_batch_export_ok(client, team.pk, batch_export_data) batch_export_id = batch_export["id"] - response = backfill_batch_export(client, team.pk, batch_export_id, "2021-01-01T00:00:00", "2021-01-01T01:00:00") + response = backfill_batch_export( + client, + team.pk, + batch_export_id, + "2021-01-01T00:00:00", + "2021-01-01T01:00:00", + ) assert response.status_code == status.HTTP_200_OK, response.json() @@ -121,10 +127,22 @@ def test_batch_export_backfill_with_start_at_after_end_at(client: HttpClient): batch_export_id = batch_export["id"] - response = backfill_batch_export(client, team.pk, batch_export_id, "2021-01-01T01:00:00", "2021-01-01T01:00:00") + response = backfill_batch_export( + client, + team.pk, + batch_export_id, + "2021-01-01T01:00:00", + "2021-01-01T01:00:00", + ) assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() - response = backfill_batch_export(client, team.pk, batch_export_id, "2021-01-01T01:00:00", "2020-01-01T01:00:00") + response = backfill_batch_export( + client, + team.pk, + batch_export_id, + "2021-01-01T01:00:00", + "2020-01-01T01:00:00", + ) assert response.status_code == status.HTTP_400_BAD_REQUEST, response.json() @@ -162,7 +180,13 @@ def test_cannot_trigger_backfill_for_another_organization(client: 
HttpClient): batch_export_id = batch_export["id"] client.force_login(other_user) - response = backfill_batch_export(client, team.pk, batch_export_id, "2021-01-01T00:00:00", "2021-01-01T01:00:00") + response = backfill_batch_export( + client, + team.pk, + batch_export_id, + "2021-01-01T00:00:00", + "2021-01-01T01:00:00", + ) assert response.status_code == status.HTTP_403_FORBIDDEN, response.json() @@ -198,7 +222,11 @@ def test_backfill_is_partitioned_by_team_id(client: HttpClient): batch_export_id = batch_export["id"] response = backfill_batch_export( - client, other_team.pk, batch_export_id, "2021-01-01T00:00:00", "2021-01-01T01:00:00" + client, + other_team.pk, + batch_export_id, + "2021-01-01T00:00:00", + "2021-01-01T01:00:00", ) assert response.status_code == status.HTTP_404_NOT_FOUND, response.json() diff --git a/posthog/api/test/batch_exports/test_create.py b/posthog/api/test/batch_exports/test_create.py index ca58d13a17347..d2f0a13d72a0b 100644 --- a/posthog/api/test/batch_exports/test_create.py +++ b/posthog/api/test/batch_exports/test_create.py @@ -56,7 +56,8 @@ def test_create_batch_export_with_interval_schedule(client: HttpClient, interval with start_test_worker(temporal): with mock.patch( - "posthog.batch_exports.http.posthoganalytics.feature_enabled", return_value=True + "posthog.batch_exports.http.posthoganalytics.feature_enabled", + return_value=True, ) as feature_enabled: response = create_batch_export( client, @@ -70,7 +71,10 @@ def test_create_batch_export_with_interval_schedule(client: HttpClient, interval str(team.uuid), groups={"organization": str(team.organization.id)}, group_properties={ - "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at} + "organization": { + "id": str(team.organization.id), + "created_at": team.organization.created_at, + } }, send_feature_flag_events=False, ) @@ -179,7 +183,8 @@ def test_cannot_create_a_batch_export_with_higher_frequencies_if_not_enabled(cli with 
start_test_worker(temporal): client.force_login(user) with mock.patch( - "posthog.batch_exports.http.posthoganalytics.feature_enabled", return_value=False + "posthog.batch_exports.http.posthoganalytics.feature_enabled", + return_value=False, ) as feature_enabled: response = create_batch_export( client, @@ -192,7 +197,10 @@ def test_cannot_create_a_batch_export_with_higher_frequencies_if_not_enabled(cli str(team.uuid), groups={"organization": str(team.organization.id)}, group_properties={ - "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at} + "organization": { + "id": str(team.organization.id), + "created_at": team.organization.created_at, + } }, send_feature_flag_events=False, ) diff --git a/posthog/api/test/dashboards/__init__.py b/posthog/api/test/dashboards/__init__.py index 8239e83818ae1..79d1e435e64ec 100644 --- a/posthog/api/test/dashboards/__init__.py +++ b/posthog/api/test/dashboards/__init__.py @@ -22,7 +22,8 @@ def soft_delete( extra_data = {} api_response = self.client.patch( - f"/api/projects/{self.team.id}/{model_type}/{model_id}", {"deleted": True, **extra_data} + f"/api/projects/{self.team.id}/{model_type}/{model_id}", + {"deleted": True, **extra_data}, ) assert api_response.status_code == status.HTTP_200_OK self.assertEqual( @@ -107,7 +108,10 @@ def list_insights( if query_params is None: query_params = {} - response = self.client.get(f"/api/projects/{team_id}/insights/", {"basic": True, "limit": 30, **query_params}) + response = self.client.get( + f"/api/projects/{team_id}/insights/", + {"basic": True, "limit": 30, **query_params}, + ) self.assertEqual(response.status_code, expected_status) response_json = response.json() @@ -133,7 +137,10 @@ def get_insight( return response_json def create_insight( - self, data: Dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_201_CREATED + self, + data: Dict[str, Any], + team_id: Optional[int] = None, + expected_status: int = 
status.HTTP_201_CREATED, ) -> Tuple[int, Dict[str, Any]]: if team_id is None: team_id = self.team.id @@ -181,7 +188,8 @@ def create_text_tile( extra_data = {} response = self.client.patch( - f"/api/projects/{team_id}/dashboards/{dashboard_id}", {"tiles": [{"text": {"body": text}, **extra_data}]} + f"/api/projects/{team_id}/dashboards/{dashboard_id}", + {"tiles": [{"text": {"body": text}, **extra_data}]}, ) self.assertEqual(response.status_code, expected_status, response.json()) @@ -190,7 +198,10 @@ def create_text_tile( return response_json.get("id", None), response_json def get_insight_activity( - self, insight_id: Optional[int] = None, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK + self, + insight_id: Optional[int] = None, + team_id: Optional[int] = None, + expected_status: int = status.HTTP_200_OK, ): if team_id is None: team_id = self.team.id @@ -259,9 +270,13 @@ def set_tile_layout(self, dashboard_id: int, expected_tiles_to_update: int) -> N self.assertEqual(response.status_code, status.HTTP_200_OK) def add_insight_to_dashboard( - self, dashboard_ids: List[int], insight_id: int, expected_status: int = status.HTTP_200_OK + self, + dashboard_ids: List[int], + insight_id: int, + expected_status: int = status.HTTP_200_OK, ): response = self.client.patch( - f"/api/projects/{self.team.id}/insights/{insight_id}", {"dashboards": dashboard_ids} + f"/api/projects/{self.team.id}/insights/{insight_id}", + {"dashboards": dashboard_ids}, ) self.assertEqual(response.status_code, expected_status) diff --git a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr index 33832fe5073e2..b6b14a9dfbd34 100644 --- a/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr +++ b/posthog/api/test/dashboards/__snapshots__/test_dashboard.ambr @@ -11232,6 +11232,24 @@ 5 /* ... 
*/) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ ' --- +# name: TestDashboard.test_retrieve_dashboard_list.33 + ' + SELECT "posthog_sharingconfiguration"."id", + "posthog_sharingconfiguration"."team_id", + "posthog_sharingconfiguration"."dashboard_id", + "posthog_sharingconfiguration"."insight_id", + "posthog_sharingconfiguration"."recording_id", + "posthog_sharingconfiguration"."created_at", + "posthog_sharingconfiguration"."enabled", + "posthog_sharingconfiguration"."access_token" + FROM "posthog_sharingconfiguration" + WHERE "posthog_sharingconfiguration"."dashboard_id" IN (1, + 2, + 3, + 4, + 5 /* ... */) /*controller='project_dashboards-list',route='api/projects/%28%3FP%3Cparent_lookup_team_id%3E%5B%5E/.%5D%2B%29/dashboards/%3F%24'*/ + ' +--- # name: TestDashboard.test_retrieve_dashboard_list.4 ' SELECT "posthog_dashboardtile"."id" diff --git a/posthog/api/test/dashboards/test_dashboard.py b/posthog/api/test/dashboards/test_dashboard.py index 5472106f36324..74216d2280453 100644 --- a/posthog/api/test/dashboards/test_dashboard.py +++ b/posthog/api/test/dashboards/test_dashboard.py @@ -45,7 +45,7 @@ "description": "Shows the number of unique users that use your app every day.", }, ], - "variables": [] + "variables": [], # purposely missing tags as they are not required } @@ -69,7 +69,10 @@ def test_retrieve_dashboard_list(self): self.dashboard_api.create_dashboard({"name": dashboard_name}) response_data = self.dashboard_api.list_dashboards() - self.assertEqual([dashboard["name"] for dashboard in response_data["results"]], dashboard_names) + self.assertEqual( + [dashboard["name"] for dashboard in response_data["results"]], + dashboard_names, + ) @snapshot_postgres_queries def test_retrieve_dashboard(self): @@ -82,9 +85,13 @@ def test_retrieve_dashboard(self): self.assertEqual(response_data["created_by"]["distinct_id"], self.user.distinct_id) 
self.assertEqual(response_data["created_by"]["first_name"], self.user.first_name) self.assertEqual(response_data["creation_mode"], "default") - self.assertEqual(response_data["restriction_level"], Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT) self.assertEqual( - response_data["effective_privilege_level"], Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT + response_data["restriction_level"], + Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, + ) + self.assertEqual( + response_data["effective_privilege_level"], + Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT, ) def test_create_basic_dashboard(self): @@ -95,9 +102,13 @@ def test_create_basic_dashboard(self): self.assertEqual(response_data["description"], "") self.assertEqual(response_data["tags"], []) self.assertEqual(response_data["creation_mode"], "default") - self.assertEqual(response_data["restriction_level"], Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT) self.assertEqual( - response_data["effective_privilege_level"], Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT + response_data["restriction_level"], + Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, + ) + self.assertEqual( + response_data["effective_privilege_level"], + Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT, ) instance = Dashboard.objects.get(id=response_data["id"]) @@ -105,7 +116,10 @@ def test_create_basic_dashboard(self): def test_update_dashboard(self): dashboard = Dashboard.objects.create( - team=self.team, name="private dashboard", created_by=self.user, creation_mode="template" + team=self.team, + name="private dashboard", + created_by=self.user, + creation_mode="template", ) _, response_data = self.dashboard_api.update_dashboard( dashboard.pk, {"name": "dashboard new name", "creation_mode": "duplicate"} @@ -114,9 +128,13 @@ def test_update_dashboard(self): self.assertEqual(response_data["name"], "dashboard new name") self.assertEqual(response_data["created_by"]["distinct_id"], 
self.user.distinct_id) self.assertEqual(response_data["creation_mode"], "template") - self.assertEqual(response_data["restriction_level"], Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT) self.assertEqual( - response_data["effective_privilege_level"], Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT + response_data["restriction_level"], + Dashboard.RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, + ) + self.assertEqual( + response_data["effective_privilege_level"], + Dashboard.RestrictionLevel.ONLY_COLLABORATORS_CAN_EDIT, ) dashboard.refresh_from_db() @@ -124,11 +142,23 @@ def test_update_dashboard(self): def test_cannot_update_dashboard_with_invalid_filters(self): dashboard = Dashboard.objects.create( - team=self.team, name="private dashboard", created_by=self.user, creation_mode="template" + team=self.team, + name="private dashboard", + created_by=self.user, + creation_mode="template", ) self.dashboard_api.update_dashboard( dashboard.pk, - {"filters": [{"key": "brand", "value": ["1"], "operator": "exact", "type": "event"}]}, + { + "filters": [ + { + "key": "brand", + "value": ["1"], + "operator": "exact", + "type": "event", + } + ] + }, expected_status=status.HTTP_400_BAD_REQUEST, ) @@ -161,7 +191,10 @@ def test_shared_dashboard(self): def test_return_cached_results_bleh(self): dashboard = Dashboard.objects.create(team=self.team, name="dashboard") - filter_dict = {"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } filter = Filter(data=filter_dict) item = Insight.objects.create(filters=filter_dict, team=self.team) @@ -178,12 +211,23 @@ def test_return_cached_results_bleh(self): ).json() item = Insight.objects.get(pk=item.pk) self.assertAlmostEqual(item.caching_state.last_refresh, now(), delta=timezone.timedelta(seconds=5)) - self.assertAlmostEqual(parser.isoparse(response["last_refresh"]), now(), 
delta=timezone.timedelta(seconds=5)) - self.assertEqual(item.caching_state.cache_key, generate_cache_key(f"{filter.toJSON()}_{self.team.pk}")) + self.assertAlmostEqual( + parser.isoparse(response["last_refresh"]), + now(), + delta=timezone.timedelta(seconds=5), + ) + self.assertEqual( + item.caching_state.cache_key, + generate_cache_key(f"{filter.toJSON()}_{self.team.pk}"), + ) response = self.dashboard_api.get_dashboard(dashboard.pk) - self.assertAlmostEqual(Dashboard.objects.get().last_accessed_at, now(), delta=timezone.timedelta(seconds=5)) + self.assertAlmostEqual( + Dashboard.objects.get().last_accessed_at, + now(), + delta=timezone.timedelta(seconds=5), + ) self.assertEqual(response["tiles"][0]["insight"]["result"][0]["count"], 0) # :KLUDGE: avoid making extra queries that are explicitly not cached in tests. Avoids false N+1-s. @@ -274,12 +318,17 @@ def test_loading_individual_dashboard_does_not_prefetch_all_possible_tiles(self) def test_no_cache_available(self): dashboard = Dashboard.objects.create(team=self.team, name="dashboard") - filter_dict = {"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } with freeze_time("2020-01-04T13:00:01Z"): # Pretend we cached something a while ago, but we won't have anything in the redis cache insight = Insight.objects.create( - filters=Filter(data=filter_dict).to_dict(), team=self.team, last_refresh=now() + filters=Filter(data=filter_dict).to_dict(), + team=self.team, + last_refresh=now(), ) DashboardTile.objects.create(dashboard=dashboard, insight=insight) @@ -296,7 +345,10 @@ def test_refresh_cache(self): # Pretend we cached something a while ago, but we won't have anything in the redis cache item_default: Insight = Insight.objects.create( filters=Filter( - data={"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + data={ + 
"events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } ).to_dict(), team=self.team, order=0, @@ -306,7 +358,14 @@ def test_refresh_cache(self): filters=Filter( data={ "display": "ActionsLineGraph", - "events": [{"id": "$pageview", "type": "events", "order": 0, "properties": []}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "properties": [], + } + ], "filters": [], "interval": "day", "pagination": {}, @@ -331,14 +390,24 @@ def test_refresh_cache(self): item_trends.refresh_from_db() self.assertEqual( - parser.isoparse(response_data["tiles"][0]["last_refresh"]), item_default.caching_state.last_refresh + parser.isoparse(response_data["tiles"][0]["last_refresh"]), + item_default.caching_state.last_refresh, ) self.assertEqual( - parser.isoparse(response_data["tiles"][1]["last_refresh"]), item_default.caching_state.last_refresh + parser.isoparse(response_data["tiles"][1]["last_refresh"]), + item_default.caching_state.last_refresh, ) - self.assertAlmostEqual(item_default.caching_state.last_refresh, now(), delta=timezone.timedelta(seconds=5)) - self.assertAlmostEqual(item_trends.caching_state.last_refresh, now(), delta=timezone.timedelta(seconds=5)) + self.assertAlmostEqual( + item_default.caching_state.last_refresh, + now(), + delta=timezone.timedelta(seconds=5), + ) + self.assertAlmostEqual( + item_trends.caching_state.last_refresh, + now(), + delta=timezone.timedelta(seconds=5), + ) def test_dashboard_endpoints(self): # create @@ -370,7 +439,11 @@ def test_dashboard_endpoints(self): def test_delete_does_not_delete_insights_by_default(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"filters": {"date_from": "-14d"}}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "some_item"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "some_item", + } ) 
dashboard_before_delete = self.dashboard_api.get_dashboard(dashboard_id) @@ -395,7 +468,10 @@ def test_delete_dashboard_can_delete_tiles(self): ) insight_on_two_dashboards_id, _ = self.dashboard_api.create_insight( - {"name": "on two dashboards", "dashboards": [dashboard_one_id, dashboard_two_id]} + { + "name": "on two dashboards", + "dashboards": [dashboard_one_id, dashboard_two_id], + } ) dashboard_one_before_delete = self.dashboard_api.get_dashboard(dashboard_one_id) @@ -407,9 +483,15 @@ def test_delete_dashboard_can_delete_tiles(self): self.dashboard_api.soft_delete(dashboard_one_id, "dashboards", {"delete_insights": True}) self.dashboard_api.get_insight( - insight_on_one_dashboard_id, self.team.id, expected_status=status.HTTP_404_NOT_FOUND + insight_on_one_dashboard_id, + self.team.id, + expected_status=status.HTTP_404_NOT_FOUND, + ) + self.dashboard_api.get_insight( + insight_on_two_dashboards_id, + self.team.id, + expected_status=status.HTTP_200_OK, ) - self.dashboard_api.get_insight(insight_on_two_dashboards_id, self.team.id, expected_status=status.HTTP_200_OK) dashboard_two_after_delete = self.dashboard_api.get_dashboard(dashboard_two_id) assert len(dashboard_two_after_delete["tiles"]) == 1 @@ -417,7 +499,11 @@ def test_delete_dashboard_can_delete_tiles(self): def test_dashboard_items(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"filters": {"date_from": "-14d"}}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "some_item"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "some_item", + } ) response = self.dashboard_api.get_dashboard(dashboard_id) @@ -443,10 +529,18 @@ def test_dashboard_insights_out_of_synch_with_tiles_are_not_shown(self): """ dashboard_id, _ = self.dashboard_api.create_dashboard({"filters": {"date_from": "-14d"}}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": 
{"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "some_item"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "some_item", + } ) out_of_synch_insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "out of synch"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "out of synch", + } ) response = self.dashboard_api.get_dashboard(dashboard_id) @@ -469,7 +563,11 @@ def test_dashboard_insights_out_of_synch_with_tiles_are_not_shown(self): def test_dashboard_insight_tiles_can_be_loaded_correct_context(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"filters": {"date_from": "-14d"}}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "some_item"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "some_item", + } ) response = self.dashboard_api.get_dashboard(dashboard_id) @@ -483,19 +581,32 @@ def test_dashboard_insight_tiles_can_be_loaded_correct_context(self): def test_dashboard_filtering_on_properties(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"filters": {"date_from": "-24h"}}) _, response = self.dashboard_api.update_dashboard( - dashboard_id, {"filters": {"date_from": "-24h", "properties": [{"key": "prop", "value": "val"}]}} + dashboard_id, + { + "filters": { + "date_from": "-24h", + "properties": [{"key": "prop", "value": "val"}], + } + }, ) self.assertEqual(response["filters"]["properties"], [{"key": "prop", "value": "val"}]) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test", "date_from": "-7d"}, "dashboards": [dashboard_id], "name": "some_item"} + { + "filters": {"hello": "test", "date_from": "-7d"}, + "dashboards": [dashboard_id], + "name": "some_item", + } 
) response = self.dashboard_api.get_dashboard(dashboard_id) self.assertEqual(len(response["tiles"]), 1) self.assertEqual(response["tiles"][0]["insight"]["name"], "some_item") - self.assertEqual(response["tiles"][0]["insight"]["filters"]["properties"], [{"key": "prop", "value": "val"}]) + self.assertEqual( + response["tiles"][0]["insight"]["filters"]["properties"], + [{"key": "prop", "value": "val"}], + ) def test_dashboard_filter_is_applied_even_if_insight_is_created_before_dashboard(self): insight_id, _ = self.dashboard_api.create_insight( @@ -535,7 +646,11 @@ def test_dashboard_item_layout(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"name": "asdasd", "pinned": True}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test"}, "dashboards": [dashboard_id], "name": "another"} + { + "filters": {"hello": "test"}, + "dashboards": [dashboard_id], + "name": "another", + } ) dashboard_json = self.dashboard_api.get_dashboard(dashboard_id) @@ -554,7 +669,11 @@ def test_dashboard_tile_color_can_be_set_for_new_or_existing_tiles(self): dashboard_id, _ = self.dashboard_api.create_dashboard({"name": "asdasd", "pinned": True}) insight_id, _ = self.dashboard_api.create_insight( - {"filters": {"hello": "test"}, "dashboards": [dashboard_id], "name": "another"} + { + "filters": {"hello": "test"}, + "dashboards": [dashboard_id], + "name": "another", + } ) dashboard_json = self.dashboard_api.get_dashboard(dashboard_id) @@ -644,7 +763,11 @@ def test_dashboard_creation_validation(self): # valid - use_dashboard is set and use_template empty string self.dashboard_api.create_dashboard( - {"name": "another", "use_template": "", "use_dashboard": existing_dashboard.id}, + { + "name": "another", + "use_template": "", + "use_dashboard": existing_dashboard.id, + }, expected_status=status.HTTP_201_CREATED, ) @@ -705,7 +828,8 @@ def test_dashboard_duplication_without_tile_duplicate_excludes_soft_deleted_tile assert tile_to_delete["insight"]["id"] == 
insight_two_id self.dashboard_api.update_dashboard( - existing_dashboard.pk, {"tiles": [{"id": tile_to_delete["id"], "deleted": True}]} + existing_dashboard.pk, + {"tiles": [{"id": tile_to_delete["id"], "deleted": True}]}, ) dashboard_json = self.dashboard_api.get_dashboard(existing_dashboard.pk) assert len(dashboard_json["tiles"]) == 2 @@ -723,7 +847,11 @@ def test_dashboard_duplication_can_duplicate_tiles(self): _, dashboard_with_tiles = self.dashboard_api.create_text_tile(existing_dashboard.id) _, duplicate_response = self.dashboard_api.create_dashboard( - {"name": "another", "use_dashboard": existing_dashboard.id, "duplicate_tiles": True} + { + "name": "another", + "use_dashboard": existing_dashboard.id, + "duplicate_tiles": True, + } ) after_duplication_insight_id = duplicate_response["tiles"][0]["insight"]["id"] @@ -739,7 +867,11 @@ def test_dashboard_duplication_can_duplicate_tiles_without_editing_name_if_there _, dashboard_with_tiles = self.dashboard_api.create_text_tile(existing_dashboard.id) _, duplicate_response = self.dashboard_api.create_dashboard( - {"name": "another", "use_dashboard": existing_dashboard.id, "duplicate_tiles": True} + { + "name": "another", + "use_dashboard": existing_dashboard.id, + "duplicate_tiles": True, + } ) assert duplicate_response["tiles"][0]["insight"]["name"] is None @@ -766,14 +898,16 @@ def test_dashboard_duplication(self): def test_invalid_dashboard_duplication(self): # pass a random number (non-existent dashboard id) as use_dashboard self.dashboard_api.create_dashboard( - {"name": "another", "use_dashboard": 12345}, expected_status=status.HTTP_400_BAD_REQUEST + {"name": "another", "use_dashboard": 12345}, + expected_status=status.HTTP_400_BAD_REQUEST, ) def test_duplication_fail_for_different_team(self): another_team = Team.objects.create(organization=self.organization) another_team_dashboard = Dashboard.objects.create(team=another_team, name="Another Team's Dashboard") self.dashboard_api.create_dashboard( - {"name": 
"another", "use_dashboard": another_team_dashboard.id}, expected_status=status.HTTP_400_BAD_REQUEST + {"name": "another", "use_dashboard": another_team_dashboard.id}, + expected_status=status.HTTP_400_BAD_REQUEST, ) def test_return_cached_results_dashboard_has_filters(self): @@ -837,12 +971,21 @@ def test_invalid_properties(self): def test_insights_with_no_insight_set(self): # We were saving some insights on the default dashboard with no insight dashboard = Dashboard.objects.create(team=self.team, name="Dashboard", created_by=self.user) - item = Insight.objects.create(filters={"events": [{"id": "$pageview"}]}, team=self.team, last_refresh=now()) + item = Insight.objects.create( + filters={"events": [{"id": "$pageview"}]}, + team=self.team, + last_refresh=now(), + ) DashboardTile.objects.create(insight=item, dashboard=dashboard) response = self.dashboard_api.get_dashboard(dashboard.pk) self.assertEqual( response["tiles"][0]["insight"]["filters"], - {"events": [{"id": "$pageview"}], "insight": "TRENDS", "date_from": "-7d", "date_to": None}, + { + "events": [{"id": "$pageview"}], + "insight": "TRENDS", + "date_from": "-7d", + "date_to": None, + }, ) def test_retrieve_dashboard_different_team(self): @@ -1036,7 +1179,10 @@ def test_create_from_template_json_must_provide_at_least_one_tile(self) -> None: assert response.status_code == 400, response.json() def test_create_from_template_json_cam_provide_text_tile(self) -> None: - template: Dict = {**valid_template, "tiles": [{"type": "TEXT", "body": "hello world", "layouts": {}}]} + template: Dict = { + **valid_template, + "tiles": [{"type": "TEXT", "body": "hello world", "layouts": {}}], + } response = self.client.post( f"/api/projects/{self.team.id}/dashboards/create_from_template_json", @@ -1068,7 +1214,12 @@ def test_create_from_template_json_cam_provide_query_tile(self) -> None: **valid_template, # client provides an incorrect "empty" filter alongside a query "tiles": [ - {"type": "INSIGHT", "query": {"kind": "a 
datatable"}, "filters": {"date_from": None}, "layouts": {}} + { + "type": "INSIGHT", + "query": {"kind": "a datatable"}, + "filters": {"date_from": None}, + "layouts": {}, + } ], } @@ -1085,7 +1236,13 @@ def test_create_from_template_json_cam_provide_query_tile(self) -> None: "insight": { "created_at": ANY, "created_by": None, - "dashboard_tiles": [{"dashboard_id": response.json()["id"], "deleted": None, "id": ANY}], + "dashboard_tiles": [ + { + "dashboard_id": response.json()["id"], + "deleted": None, + "id": ANY, + } + ], "dashboards": [response.json()["id"]], "deleted": False, "derived_name": None, diff --git a/posthog/api/test/dashboards/test_dashboard_duplication.py b/posthog/api/test/dashboards/test_dashboard_duplication.py index bb273487a5ab6..dbfa572e9c014 100644 --- a/posthog/api/test/dashboards/test_dashboard_duplication.py +++ b/posthog/api/test/dashboards/test_dashboard_duplication.py @@ -15,7 +15,8 @@ def setUp(self) -> None: dashboard_id, _ = self.dashboard_api.create_dashboard({}) self.dashboard_api.create_text_tile( - dashboard_id, extra_data={"layouts": self.tile_layout, "color": self.tile_color} + dashboard_id, + extra_data={"layouts": self.tile_layout, "color": self.tile_color}, ) self.dashboard_api.create_insight({"dashboards": [dashboard_id]}) @@ -36,7 +37,11 @@ def setUp(self) -> None: def test_duplicating_dashboard_while_duplicating_tiles(self) -> None: duplicated_dashboard = self.client.post( f"/api/projects/{self.team.id}/dashboards/", - {"duplicate_tiles": True, "use_dashboard": self.starting_dashboard["id"], "name": "new"}, + { + "duplicate_tiles": True, + "use_dashboard": self.starting_dashboard["id"], + "name": "new", + }, ).json() assert len(duplicated_dashboard["tiles"]) == 2 @@ -45,13 +50,23 @@ def test_duplicating_dashboard_while_duplicating_tiles(self) -> None: # makes new children assert self.original_child_ids != self._tile_child_ids_from(duplicated_dashboard) - assert [tile["color"] for tile in duplicated_dashboard["tiles"]] == 
[self.tile_color, self.tile_color] - assert [tile["layouts"] for tile in duplicated_dashboard["tiles"]] == [self.tile_layout, self.tile_layout] + assert [tile["color"] for tile in duplicated_dashboard["tiles"]] == [ + self.tile_color, + self.tile_color, + ] + assert [tile["layouts"] for tile in duplicated_dashboard["tiles"]] == [ + self.tile_layout, + self.tile_layout, + ] def test_duplicating_dashboard_without_duplicating_tiles(self) -> None: duplicated_dashboard = self.client.post( f"/api/projects/{self.team.id}/dashboards/", - {"duplicate_tiles": False, "use_dashboard": self.starting_dashboard["id"], "name": "new"}, + { + "duplicate_tiles": False, + "use_dashboard": self.starting_dashboard["id"], + "name": "new", + }, ).json() assert len(duplicated_dashboard["tiles"]) == 2 @@ -60,8 +75,14 @@ def test_duplicating_dashboard_without_duplicating_tiles(self) -> None: # uses existing children assert self.original_child_ids == self._tile_child_ids_from(duplicated_dashboard) - assert [tile["color"] for tile in duplicated_dashboard["tiles"]] == [self.tile_color, self.tile_color] - assert [tile["layouts"] for tile in duplicated_dashboard["tiles"]] == [self.tile_layout, self.tile_layout] + assert [tile["color"] for tile in duplicated_dashboard["tiles"]] == [ + self.tile_color, + self.tile_color, + ] + assert [tile["layouts"] for tile in duplicated_dashboard["tiles"]] == [ + self.tile_layout, + self.tile_layout, + ] @staticmethod def _tile_child_ids_from(dashboard_json: Dict) -> List[int]: diff --git a/posthog/api/test/dashboards/test_dashboard_text_tiles.py b/posthog/api/test/dashboards/test_dashboard_text_tiles.py index 1ab85f9be6d57..509c7b0b9f36a 100644 --- a/posthog/api/test/dashboards/test_dashboard_text_tiles.py +++ b/posthog/api/test/dashboards/test_dashboard_text_tiles.py @@ -188,7 +188,11 @@ def test_can_remove_text_tiles_from_dashboard(self) -> None: dashboard_json = self.dashboard_api.get_dashboard(dashboard_id) tiles = dashboard_json["tiles"] assert len(tiles) 
== 3 - assert [t["text"]["body"] for t in tiles] == ["io sono testo", "soy texto", "i am text"] + assert [t["text"]["body"] for t in tiles] == [ + "io sono testo", + "soy texto", + "i am text", + ] def test_do_not_see_deleted_text_tiles_when_adding_new_ones(self) -> None: dashboard_id, _ = self.dashboard_api.create_dashboard({"name": "dashboard"}) diff --git a/posthog/api/test/notebooks/test_notebook.py b/posthog/api/test/notebooks/test_notebook.py index 1b7f36ae54ce3..a82b8aef4062d 100644 --- a/posthog/api/test/notebooks/test_notebook.py +++ b/posthog/api/test/notebooks/test_notebook.py @@ -54,7 +54,10 @@ def test_cannot_list_deleted_notebook(self) -> None: notebook_two = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={}).json() notebook_three = self.client.post(f"/api/projects/{self.team.id}/notebooks", data={}).json() - self.client.patch(f"/api/projects/{self.team.id}/notebooks/{notebook_two['short_id']}", data={"deleted": True}) + self.client.patch( + f"/api/projects/{self.team.id}/notebooks/{notebook_two['short_id']}", + data={"deleted": True}, + ) response = self.client.get(f"/api/projects/{self.team.id}/notebooks") @@ -68,12 +71,17 @@ def test_cannot_list_deleted_notebook(self) -> None: @parameterized.expand( [ ("without_content", None, None), - ("with_content", {"some": "kind", "of": "tip", "tap": "content"}, "some kind of tip tap content"), + ( + "with_content", + {"some": "kind", "of": "tip", "tap": "content"}, + "some kind of tip tap content", + ), ] ) def test_create_a_notebook(self, _, content: Dict | None, text_content: str | None) -> None: response = self.client.post( - f"/api/projects/{self.team.id}/notebooks", data={"content": content, "text_content": text_content} + f"/api/projects/{self.team.id}/notebooks", + data={"content": content, "text_content": text_content}, ) assert response.status_code == status.HTTP_201_CREATED assert response.json() == { @@ -113,7 +121,11 @@ def test_updates_notebook(self) -> None: with 
freeze_time("2022-01-02"): response = self.client.patch( f"/api/projects/{self.team.id}/notebooks/{short_id}", - {"content": {"some": "updated content"}, "version": response_json["version"], "title": "New title"}, + { + "content": {"some": "updated content"}, + "version": response_json["version"], + "title": "New title", + }, ) assert response.json()["short_id"] == short_id @@ -157,7 +169,10 @@ def test_updates_notebook(self) -> None: }, "item_id": response.json()["id"], "scope": "Notebook", - "user": {"email": self.user.email, "first_name": self.user.first_name}, + "user": { + "email": self.user.email, + "first_name": self.user.first_name, + }, }, ], ) diff --git a/posthog/api/test/notebooks/test_notebook_filtering.py b/posthog/api/test/notebooks/test_notebook_filtering.py index 9f6eda474060a..bbe191892d8e8 100644 --- a/posthog/api/test/notebooks/test_notebook_filtering.py +++ b/posthog/api/test/notebooks/test_notebook_filtering.py @@ -23,13 +23,19 @@ PERSON_CONTENT = lambda id: {"type": "ph-person", "attrs": {"id": id or "person_id"}} -RECORDING_CONTENT = lambda id: {"type": "ph-recording", "attrs": {"id": id or "session_recording_id"}} +RECORDING_CONTENT = lambda id: { + "type": "ph-recording", + "attrs": {"id": id or "session_recording_id"}, +} RECORDING_COMMENT_CONTENT = lambda id, text: { "type": "paragraph", "content": [ { "type": "ph-replay-timestamp", - "attrs": {"playbackTime": 0, "sessionRecordingId": id or "session_recording_id"}, + "attrs": { + "playbackTime": 0, + "sessionRecordingId": id or "session_recording_id", + }, }, {"text": text or "what the person typed", "type": "text"}, ], @@ -45,7 +51,11 @@ }, } -BASIC_TEXT = lambda text: {"type": "paragraph", "content": [{"text": text, "type": "text"}], "text_content": text} +BASIC_TEXT = lambda text: { + "type": "paragraph", + "content": [{"text": text, "type": "text"}], + "text_content": text, +} class TestNotebooksFiltering(APIBaseTest, QueryMatchingTest): @@ -124,7 +134,10 @@ def 
test_filters_based_on_params(self) -> None: f"/api/projects/{self.team.id}/notebooks?user=true", ).json()["results"] - assert [r["short_id"] for r in results] == [notebook_two.short_id, notebook_one.short_id] + assert [r["short_id"] for r in results] == [ + notebook_two.short_id, + notebook_one.short_id, + ] response = self.client.get( f"/api/projects/{self.team.id}/notebooks?created_by={other_user.uuid}", @@ -362,8 +375,14 @@ def test_filtering_by_id_of_types(self) -> None: ) def test_notebook_filter_can_combine(self) -> None: - recording_one_node = {"type": "ph-recording", "attrs": {"id": "recording_one", "height": None}} - recording_two_node = {"type": "ph-recording", "attrs": {"id": "recording_two", "height": None}} + recording_one_node = { + "type": "ph-recording", + "attrs": {"id": "recording_one", "height": None}, + } + recording_two_node = { + "type": "ph-recording", + "attrs": {"id": "recording_two", "height": None}, + } content_with_both_recordings = [recording_one_node, recording_two_node] content_with_recording_one = [recording_one_node] diff --git a/posthog/api/test/openapi_validation.py b/posthog/api/test/openapi_validation.py index 0c3bd22d26896..e86bf5198bb53 100644 --- a/posthog/api/test/openapi_validation.py +++ b/posthog/api/test/openapi_validation.py @@ -56,7 +56,10 @@ def validate_response(openapi_spec: Dict[str, Any], response: Any, path_override request_body_content_type = response.request.get("CONTENT_TYPE", "*/*").split(";")[0] request_body_content_encoding = response.request.get("HTTP_CONTENT_ENCODING", None) - request_body_value = cast(bytes, request_fake_payload._FakePayload__content.getvalue()) # type: ignore + request_body_value = cast( + bytes, + request_fake_payload._FakePayload__content.getvalue(), # type: ignore + ) if request_body_content_encoding == "gzip": request_body = gzip.decompress(request_body_value) elif request_body_content_encoding == "lz64": diff --git a/posthog/api/test/test_action.py b/posthog/api/test/test_action.py 
index 15ba5f30157de..fc21c9f12f270 100644 --- a/posthog/api/test/test_action.py +++ b/posthog/api/test/test_action.py @@ -22,7 +22,14 @@ def test_create_action(self, patch_capture, *args): f"/api/projects/{self.team.id}/actions/", data={ "name": "user signed up", - "steps": [{"text": "sign up", "selector": "div > button", "url": "/signup", "isNew": "asdf"}], + "steps": [ + { + "text": "sign up", + "selector": "div > button", + "url": "/signup", + "isNew": "asdf", + } + ], "description": "Test description", }, HTTP_ORIGIN="http://testserver", @@ -68,7 +75,9 @@ def test_cant_create_action_with_the_same_name(self, *args): # Make sure the endpoint works with and without the trailing slash response = self.client.post( - f"/api/projects/{self.team.id}/actions/", {"name": "user signed up"}, HTTP_ORIGIN="http://testserver" + f"/api/projects/{self.team.id}/actions/", + {"name": "user signed up"}, + HTTP_ORIGIN="http://testserver", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -157,7 +166,6 @@ def test_update_action(self, patch_capture, *args): self.client.get(f"/api/projects/{self.team.id}/actions/") def test_update_action_remove_all_steps(self, *args): - action = Action.objects.create(name="user signed up", team=self.team) ActionStep.objects.create(action=action, text="sign me up!") @@ -202,11 +210,15 @@ def test_create_from_other_domain(self, *args): self.assertEqual(response.status_code, 201) self.assertEqual(response.json()["post_to_slack"], True) - list_response = self.client.get(f"/api/projects/{self.team.id}/actions/", HTTP_ORIGIN="https://evilwebsite.com") + list_response = self.client.get( + f"/api/projects/{self.team.id}/actions/", + HTTP_ORIGIN="https://evilwebsite.com", + ) self.assertEqual(list_response.status_code, 403) detail_response = self.client.get( - f"/api/projects/{self.team.id}/actions/{response.json()['id']}/", HTTP_ORIGIN="https://evilwebsite.com" + 
f"/api/projects/{self.team.id}/actions/{response.json()['id']}/", + HTTP_ORIGIN="https://evilwebsite.com", ) self.assertEqual(detail_response.status_code, 403) @@ -250,10 +262,25 @@ def test_get_event_count(self, *args): team2 = Organization.objects.bootstrap(None, team_fields={"name": "bla"})[2] action = Action.objects.create(team=self.team, name="bla") ActionStep.objects.create(action=action, event="custom event") - _create_event(event="custom event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:20:00Z") - _create_event(event="another event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:20:00Z") + _create_event( + event="custom event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:20:00Z", + ) + _create_event( + event="another event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:20:00Z", + ) # test team leakage - _create_event(event="custom event", team=team2, distinct_id="test", timestamp="2021-12-04T19:20:00Z") + _create_event( + event="custom event", + team=team2, + distinct_id="test", + timestamp="2021-12-04T19:20:00Z", + ) response = self.client.get(f"/api/projects/{self.team.id}/actions/{action.id}/count").json() self.assertEqual(response, {"count": 1}) @@ -262,10 +289,22 @@ def test_get_event_count(self, *args): def test_hogql_filter(self, *args): action = Action.objects.create(team=self.team, name="bla") ActionStep.objects.create( - action=action, event="custom event", properties=[{"key": "'a%sd' != 'sdf'", "type": "hogql"}] + action=action, + event="custom event", + properties=[{"key": "'a%sd' != 'sdf'", "type": "hogql"}], + ) + _create_event( + event="custom event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:20:00Z", + ) + _create_event( + event="another event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:21:00Z", ) - _create_event(event="custom event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:20:00Z") - 
_create_event(event="another event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:21:00Z") # action count response = self.client.get(f"/api/projects/{self.team.id}/actions/{action.id}/count").json() @@ -283,10 +322,22 @@ def test_hogql_filter(self, *args): def test_hogql_filter_no_event(self, *args): action = Action.objects.create(team=self.team, name="bla") ActionStep.objects.create( - action=action, event=None, properties=[{"key": "event like 'blue %'", "type": "hogql"}] + action=action, + event=None, + properties=[{"key": "event like 'blue %'", "type": "hogql"}], + ) + _create_event( + event="blue event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:20:00Z", + ) + _create_event( + event="green event", + team=self.team, + distinct_id="test", + timestamp="2021-12-04T19:21:00Z", ) - _create_event(event="blue event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:20:00Z") - _create_event(event="green event", team=self.team, distinct_id="test", timestamp="2021-12-04T19:21:00Z") # action count response = self.client.get(f"/api/projects/{self.team.id}/actions/{action.id}/count").json() @@ -306,14 +357,18 @@ def test_listing_actions_is_not_nplus1(self) -> None: self.client.get(f"/api/projects/{self.team.id}/actions/") Action.objects.create( - team=self.team, name="first", created_by=User.objects.create_and_join(self.organization, "a", "") + team=self.team, + name="first", + created_by=User.objects.create_and_join(self.organization, "a", ""), ) with self.assertNumQueries(7), snapshot_postgres_queries_context(self): self.client.get(f"/api/projects/{self.team.id}/actions/") Action.objects.create( - team=self.team, name="second", created_by=User.objects.create_and_join(self.organization, "b", "") + team=self.team, + name="second", + created_by=User.objects.create_and_join(self.organization, "b", ""), ) with self.assertNumQueries(7), snapshot_postgres_queries_context(self): @@ -332,7 +387,8 @@ def 
test_get_tags_on_non_ee_returns_empty_list(self): def test_create_tags_on_non_ee_not_allowed(self): response = self.client.post( - f"/api/projects/{self.team.id}/actions/", {"name": "Default", "tags": ["random", "hello"]} + f"/api/projects/{self.team.id}/actions/", + {"name": "Default", "tags": ["random", "hello"]}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -346,7 +402,11 @@ def test_update_tags_on_non_ee_not_allowed(self): response = self.client.patch( f"/api/projects/{self.team.id}/actions/{action.id}", - {"name": "action new name", "tags": ["random", "hello"], "description": "Internal system metrics."}, + { + "name": "action new name", + "tags": ["random", "hello"], + "description": "Internal system metrics.", + }, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -375,7 +435,11 @@ def test_empty_tags_does_not_delete_tags(self): response = self.client.patch( f"/api/projects/{self.team.id}/actions/{action.id}", - {"name": "action new name", "description": "Internal system metrics.", "tags": []}, + { + "name": "action new name", + "description": "Internal system metrics.", + "tags": [], + }, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -387,7 +451,14 @@ def test_hard_deletion_is_forbidden(self): f"/api/projects/{self.team.id}/actions/", data={ "name": "user signed up", - "steps": [{"text": "sign up", "selector": "div > button", "url": "/signup", "isNew": "asdf"}], + "steps": [ + { + "text": "sign up", + "selector": "div > button", + "url": "/signup", + "isNew": "asdf", + } + ], "description": "Test description", }, HTTP_ORIGIN="http://testserver", diff --git a/posthog/api/test/test_activity_log.py b/posthog/api/test/test_activity_log.py index 4f55c24fac3dc..a59a9e2736de0 100644 --- a/posthog/api/test/test_activity_log.py +++ b/posthog/api/test/test_activity_log.py @@ -13,7 +13,10 @@ def _feature_flag_json_payload(key: str) -> Dict: return { "key": key, "name": "", - "filters": {"groups": [{"properties": 
[], "rollout_percentage": None}], "multivariate": None}, + "filters": { + "groups": [{"properties": [], "rollout_percentage": None}], + "multivariate": None, + }, "deleted": False, "active": True, "created_by": None, @@ -60,12 +63,14 @@ def _create_and_edit_things(self): frozen_time.tick(delta=timedelta(minutes=6)) flag_one = self.client.post( - f"/api/projects/{self.team.id}/feature_flags/", _feature_flag_json_payload("one") + f"/api/projects/{self.team.id}/feature_flags/", + _feature_flag_json_payload("one"), ).json()["id"] frozen_time.tick(delta=timedelta(minutes=6)) flag_two = self.client.post( - f"/api/projects/{self.team.id}/feature_flags/", _feature_flag_json_payload("two") + f"/api/projects/{self.team.id}/feature_flags/", + _feature_flag_json_payload("two"), ).json()["id"] frozen_time.tick(delta=timedelta(minutes=6)) @@ -118,7 +123,8 @@ def _edit_them_all( frozen_time.tick(delta=timedelta(minutes=6)) assert ( self.client.patch( - f"/api/projects/{self.team.id}/feature_flags/{flag_one}", {"name": f"one-edited-by-{the_user.id}"} + f"/api/projects/{self.team.id}/feature_flags/{flag_one}", + {"name": f"one-edited-by-{the_user.id}"}, ).status_code == status.HTTP_200_OK ) @@ -126,7 +132,8 @@ def _edit_them_all( frozen_time.tick(delta=timedelta(minutes=6)) assert ( self.client.patch( - f"/api/projects/{self.team.id}/feature_flags/{flag_two}", {"name": f"two-edited-by-{the_user.id}"} + f"/api/projects/{self.team.id}/feature_flags/{flag_two}", + {"name": f"two-edited-by-{the_user.id}"}, ).status_code == status.HTTP_200_OK ) @@ -250,7 +257,8 @@ def test_reading_notifications_marks_them_unread(self): # the user can mark where they have read up to bookmark_response = self.client.post( - f"/api/projects/{self.team.id}/activity_log/bookmark_activity_notification", {"bookmark": most_recent_date} + f"/api/projects/{self.team.id}/activity_log/bookmark_activity_notification", + {"bookmark": most_recent_date}, ) assert bookmark_response.status_code == 
status.HTTP_204_NO_CONTENT @@ -260,7 +268,10 @@ def test_reading_notifications_marks_them_unread(self): assert [c["unread"] for c in changes.json()["results"]] == [True, True] def _create_insight( - self, data: Dict[str, Any], team_id: Optional[int] = None, expected_status: int = status.HTTP_201_CREATED + self, + data: Dict[str, Any], + team_id: Optional[int] = None, + expected_status: int = status.HTTP_201_CREATED, ) -> Tuple[int, Dict[str, Any]]: if team_id is None: team_id = self.team.id diff --git a/posthog/api/test/test_annotation.py b/posthog/api/test/test_annotation.py index c559411f607d5..971028d4f55e7 100644 --- a/posthog/api/test/test_annotation.py +++ b/posthog/api/test/test_annotation.py @@ -6,14 +6,22 @@ from rest_framework import status from posthog.models import Annotation, Organization, Team, User -from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries_context, FuzzyInt +from posthog.test.base import ( + APIBaseTest, + QueryMatchingTest, + snapshot_postgres_queries_context, + FuzzyInt, +) class TestAnnotation(APIBaseTest, QueryMatchingTest): @patch("posthog.api.annotation.report_user_action") def test_retrieving_annotation(self, mock_capture): Annotation.objects.create( - organization=self.organization, team=self.team, created_at="2020-01-04T12:00:00Z", content="hello world!" 
+ organization=self.organization, + team=self.team, + created_at="2020-01-04T12:00:00Z", + content="hello world!", ) # Annotation creation is not reported to PostHog because it has no created_by @@ -89,7 +97,10 @@ def test_cannot_fetch_annotations_of_org_user_does_not_belong_to(self): response_1 = self.client.get(f"/api/projects/{separate_team.id}/annotations/") self.assertEqual(response_1.status_code, 403) - self.assertEqual(response_1.json(), self.permission_denied_response("You don't have access to the project.")) + self.assertEqual( + response_1.json(), + self.permission_denied_response("You don't have access to the project."), + ) response_2 = self.client.get(f"/api/projects/{self.team.id}/annotations/") @@ -122,7 +133,9 @@ def test_creating_annotation(self, mock_capture): # Assert analytics are sent mock_capture.assert_called_once_with( - self.user, "annotation created", {"scope": "organization", "date_marker": date_marker} + self.user, + "annotation created", + {"scope": "organization", "date_marker": date_marker}, ) @patch("posthog.api.annotation.report_user_action") @@ -149,13 +162,17 @@ def test_can_create_annotations_as_a_bot(self, mock_capture): def test_downgrading_scope_from_org_to_project_uses_team_id_from_api(self, mock_capture): second_team = Team.objects.create(organization=self.organization, name="Second team") test_annotation = Annotation.objects.create( - organization=self.organization, team=self.team, content="hello world!", scope=Annotation.Scope.ORGANIZATION + organization=self.organization, + team=self.team, + content="hello world!", + scope=Annotation.Scope.ORGANIZATION, ) mock_capture.reset_mock() # Disregard the "annotation created" call self.client.force_login(self.user) response = self.client.patch( - f"/api/projects/{second_team.id}/annotations/{test_annotation.pk}/", {"scope": Annotation.Scope.PROJECT} + f"/api/projects/{second_team.id}/annotations/{test_annotation.pk}/", + {"scope": Annotation.Scope.PROJECT}, ) 
test_annotation.refresh_from_db() diff --git a/posthog/api/test/test_app_metrics.py b/posthog/api/test/test_app_metrics.py index 401424170930f..7d3f2a4aa9bf0 100644 --- a/posthog/api/test/test_app_metrics.py +++ b/posthog/api/test/test_app_metrics.py @@ -62,7 +62,13 @@ def test_retrieve(self): "failures": [0, 0, 0, 0, 0, 0, 1, 0], "totals": {"successes": 3, "successes_on_retry": 0, "failures": 1}, }, - "errors": [{"error_type": "SomeError", "count": 1, "last_seen": "2021-12-04T00:00:00Z"}], + "errors": [ + { + "error_type": "SomeError", + "count": 1, + "last_seen": "2021-12-04T00:00:00Z", + } + ], }, ) @@ -99,7 +105,11 @@ def test_retrieve_historical_export(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) with freeze_time("2021-08-25T05:00:00Z"): @@ -107,7 +117,11 @@ def test_retrieve_historical_export(self): activity="export_success", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload={}), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload={}, + ), ), ) @@ -159,7 +173,11 @@ def test_retrieve_historical_export(self): "successes": [0, 102, 0, 10, 0, 0, 0], "successes_on_retry": [0, 0, 0, 0, 0, 0, 0], "failures": [0, 0, 1, 0, 0, 0, 0], - "totals": {"successes": 112, "successes_on_retry": 0, "failures": 1}, + "totals": { + "successes": 112, + "successes_on_retry": 0, + "failures": 1, + }, }, "summary": { "duration": 4 * 60 * 60, @@ -170,7 +188,13 @@ def test_retrieve_historical_export(self): "created_at": "2021-08-25T01:00:00Z", "created_by": mock.ANY, }, - "errors": [{"error_type": "SomeError", "count": 1, "last_seen": "2021-08-25T02:55:00Z"}], + "errors": [ + { + "error_type": "SomeError", + "count": 1, + "last_seen": 
"2021-08-25T02:55:00Z", + } + ], }, ) diff --git a/posthog/api/test/test_async_migrations.py b/posthog/api/test/test_async_migrations.py index cedf64f27a2c3..f3b38a752dfb6 100644 --- a/posthog/api/test/test_async_migrations.py +++ b/posthog/api/test/test_async_migrations.py @@ -1,7 +1,11 @@ from unittest.mock import patch from posthog.async_migrations.definition import AsyncMigrationDefinition -from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus +from posthog.models.async_migration import ( + AsyncMigration, + AsyncMigrationError, + MigrationStatus, +) from posthog.test.base import APIBaseTest @@ -28,7 +32,6 @@ def setUp(self): return super().setUp() def test_get_async_migrations_without_staff_status(self): - response = self.client.get(f"/api/async_migrations/").json() self.assertEqual(response["count"], 0) @@ -55,7 +58,8 @@ def test_trigger_endpoint(self, mock_run_async_migration): sm1 = create_async_migration() response = self.client.post( - f"/api/async_migrations/{sm1.id}/trigger", {"parameters": {"SOME_KEY": 1234}} + f"/api/async_migrations/{sm1.id}/trigger", + {"parameters": {"SOME_KEY": 1234}}, ).json() sm1.refresh_from_db() @@ -118,4 +122,7 @@ def test_force_rollback_endpoint_migration_not_complete(self): response = self.client.post(f"/api/async_migrations/{sm1.id}/force_rollback").json() self.assertEqual(response["success"], False) - self.assertEqual(response["error"], "Can't force rollback a migration that did not complete successfully.") + self.assertEqual( + response["error"], + "Can't force rollback a migration that did not complete successfully.", + ) diff --git a/posthog/api/test/test_authentication.py b/posthog/api/test/test_authentication.py index f4bc89beb2519..6ce47c6c1d233 100644 --- a/posthog/api/test/test_authentication.py +++ b/posthog/api/test/test_authentication.py @@ -34,7 +34,11 @@ class TestLoginPrecheckAPI(APIBaseTest): CONFIG_AUTO_LOGIN = False def 
test_login_precheck_with_unenforced_sso(self): - OrganizationDomain.objects.create(domain="witw.app", organization=self.organization, verified_at=timezone.now()) + OrganizationDomain.objects.create( + domain="witw.app", + organization=self.organization, + verified_at=timezone.now(), + ) response = self.client.post("/api/login/precheck", {"email": "any_user_name_here@witw.app"}) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -82,7 +86,11 @@ def test_user_logs_in_with_email_and_password(self, mock_capture, mock_identify) self.user.distinct_id, "user logged in", properties={"social_provider": ""}, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) @patch("posthog.api.authentication.is_email_available", return_value=True) @@ -142,7 +150,10 @@ def test_user_cant_login_with_incorrect_password(self, mock_capture): @patch("posthoganalytics.capture") def test_user_cant_login_with_incorrect_email(self, mock_capture): - response = self.client.post("/api/login", {"email": "user2@posthog.com", "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": "user2@posthog.com", "password": self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json(), self.ERROR_INVALID_CREDENTIALS) @@ -183,7 +194,10 @@ def test_login_endpoint_is_protected_against_brute_force_attempts(self): # Fill the attempt limit with self.settings(AXES_ENABLED=True, AXES_FAILURE_LIMIT=3): for _ in range(0, 2): - response = self.client.post("/api/login", {"email": "new_user@posthog.com", "password": "invalid"}) + response = self.client.post( + "/api/login", + {"email": "new_user@posthog.com", "password": "invalid"}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json(), 
self.ERROR_INVALID_CREDENTIALS) @@ -210,7 +224,12 @@ def test_login_2fa_enabled(self): self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual( response.json(), - {"type": "server_error", "code": "2fa_required", "detail": "2FA is required.", "attr": None}, + { + "type": "server_error", + "code": "2fa_required", + "detail": "2FA is required.", + "attr": None, + }, ) # Assert user is not logged in @@ -234,11 +253,19 @@ def test_2fa_expired(self): self.user.totpdevice_set.create(name="default", key=random_hex(), digits=6) # type: ignore with freeze_time("2023-01-01T10:00:00"): - response = self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) + response = self.client.post( + "/api/login", + {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}, + ) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) self.assertEqual( response.json(), - {"type": "server_error", "code": "2fa_required", "detail": "2FA is required.", "attr": None}, + { + "type": "server_error", + "code": "2fa_required", + "detail": "2FA is required.", + "attr": None, + }, ) with freeze_time("2023-01-01T10:30:00"): @@ -260,9 +287,13 @@ def test_2fa_expired(self): def test_2fa_throttling(self): self.user.totpdevice_set.create(name="default", key=random_hex(), digits=6) # type: ignore self.client.post("/api/login", {"email": self.CONFIG_EMAIL, "password": self.CONFIG_PASSWORD}) - self.assertEqual(self.client.post("/api/login/token", {"token": "abcdefg"}).json()["code"], "2fa_invalid") self.assertEqual( - self.client.post("/api/login/token", {"token": "abcdefg"}).json()["code"], "2fa_too_many_attempts" + self.client.post("/api/login/token", {"token": "abcdefg"}).json()["code"], + "2fa_invalid", + ) + self.assertEqual( + self.client.post("/api/login/token", {"token": "abcdefg"}).json()["code"], + "2fa_too_many_attempts", ) @@ -289,7 +320,8 @@ def test_anonymous_user_can_request_password_reset(self, mock_capture): 
user: User = User.objects.get(email=self.CONFIG_EMAIL) self.assertEqual( - user.requested_password_reset_at, datetime.datetime(2021, 10, 5, 12, 0, 0, tzinfo=timezone.utc) + user.requested_password_reset_at, + datetime.datetime(2021, 10, 5, 12, 0, 0, tzinfo=timezone.utc), ) self.assertSetEqual({",".join(outmail.to) for outmail in mail.outbox}, {self.CONFIG_EMAIL}) @@ -299,7 +331,9 @@ def test_anonymous_user_can_request_password_reset(self, mock_capture): html_message = mail.outbox[0].alternatives[0][0] # type: ignore self.validate_basic_html( - html_message, "https://my.posthog.net", preheader="Please follow the link inside to reset your password." + html_message, + "https://my.posthog.net", + preheader="Please follow the link inside to reset your password.", ) # validate reset token @@ -307,7 +341,8 @@ def test_anonymous_user_can_request_password_reset(self, mock_capture): reset_link = html_message[link_index : html_message.find('"', link_index)] self.assertTrue( password_reset_token_generator.check_token( - self.user, reset_link.replace("https://my.posthog.net/reset/", "").replace(f"{self.user.uuid}/", "") + self.user, + reset_link.replace("https://my.posthog.net/reset/", "").replace(f"{self.user.uuid}/", ""), ) ) @@ -337,7 +372,9 @@ def test_reset_with_sso_available(self): html_message = mail.outbox[0].alternatives[0][0] # type: ignore self.validate_basic_html( - html_message, "https://my.posthog.net", preheader="Please follow the link inside to reset your password." 
+ html_message, + "https://my.posthog.net", + preheader="Please follow the link inside to reset your password.", ) # validate reset token @@ -345,7 +382,8 @@ def test_reset_with_sso_available(self): reset_link = html_message[link_index : html_message.find('"', link_index)] self.assertTrue( password_reset_token_generator.check_token( - self.user, reset_link.replace(f"https://my.posthog.net/reset/{self.user.uuid}/", "") + self.user, + reset_link.replace(f"https://my.posthog.net/reset/{self.user.uuid}/", ""), ) ) @@ -389,7 +427,8 @@ def test_cant_reset_more_than_six_times(self): # Fourth request should fail self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) self.assertDictContainsSubset( - {"attr": None, "code": "throttled", "type": "throttled_error"}, response.json() + {"attr": None, "code": "throttled", "type": "throttled_error"}, + response.json(), ) # Three emails should be sent, fourth should not @@ -408,7 +447,12 @@ def test_cant_validate_token_without_a_token(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), - {"type": "validation_error", "code": "required", "detail": "This field is required.", "attr": "token"}, + { + "type": "validation_error", + "code": "required", + "detail": "This field is required.", + "attr": "token", + }, ) def test_invalid_token_returns_error(self): @@ -418,7 +462,12 @@ def test_invalid_token_returns_error(self): # tokens expire after one day expired_token = password_reset_token_generator.make_token(self.user) - for token in [valid_token[:-1], "not_even_trying", self.user.uuid, expired_token]: + for token in [ + valid_token[:-1], + "not_even_trying", + self.user.uuid, + expired_token, + ]: response = self.client.get(f"/api/reset/{self.user.uuid}/?token={token}") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -470,12 +519,20 @@ def test_user_can_reset_password(self, mock_capture, mock_identify): 
self.user.distinct_id, "user logged in", properties={"social_provider": ""}, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) mock_capture.assert_any_call( self.user.distinct_id, "user password reset", - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) self.assertEqual(mock_capture.call_count, 2) @@ -507,7 +564,12 @@ def test_cant_reset_password_with_no_token(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), - {"type": "validation_error", "code": "required", "detail": "This field is required.", "attr": "token"}, + { + "type": "validation_error", + "code": "required", + "detail": "This field is required.", + "attr": "token", + }, ) # user remains logged out @@ -526,8 +588,16 @@ def test_cant_reset_password_with_invalid_token(self): # tokens expire after one day expired_token = password_reset_token_generator.make_token(self.user) - for token in [valid_token[:-1], "not_even_trying", self.user.uuid, expired_token]: - response = self.client.post(f"/api/reset/{self.user.uuid}/", {"token": token, "password": "a12345678"}) + for token in [ + valid_token[:-1], + "not_even_trying", + self.user.uuid, + expired_token, + ]: + response = self.client.post( + f"/api/reset/{self.user.uuid}/", + {"token": token, "password": "a12345678"}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -579,7 +649,8 @@ def test_e2e_test_special_handlers(self): with self.settings(E2E_TESTING=True): response = self.client.post( - "/api/reset/e2e_test_user/", {"token": "e2e_test_token", "password": "a12345678"} + 
"/api/reset/e2e_test_user/", + {"token": "e2e_test_token", "password": "a12345678"}, ) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) @@ -590,7 +661,10 @@ def test_personal_api_key_updates_last_used_at_hourly(self): personal_api_key = generate_random_token_personal() PersonalAPIKey.objects.create( - label="X", user=self.user, last_used_at="2021-08-25T21:09:14", secure_value=hash_key_value(personal_api_key) + label="X", + user=self.user, + last_used_at="2021-08-25T21:09:14", + secure_value=hash_key_value(personal_api_key), ) with freeze_time("2021-08-25T22:10:14.252"): @@ -610,7 +684,10 @@ def test_personal_api_key_updates_last_used_at_outside_the_year(self): personal_api_key = generate_random_token_personal() PersonalAPIKey.objects.create( - label="X", user=self.user, last_used_at="2021-08-25T21:09:14", secure_value=hash_key_value(personal_api_key) + label="X", + user=self.user, + last_used_at="2021-08-25T21:09:14", + secure_value=hash_key_value(personal_api_key), ) with freeze_time("2022-08-25T22:00:14.252"): @@ -630,7 +707,10 @@ def test_personal_api_key_updates_last_used_at_outside_the_day(self): personal_api_key = generate_random_token_personal() PersonalAPIKey.objects.create( - label="X", user=self.user, last_used_at="2021-08-25T21:09:14", secure_value=hash_key_value(personal_api_key) + label="X", + user=self.user, + last_used_at="2021-08-25T21:09:14", + secure_value=hash_key_value(personal_api_key), ) with freeze_time("2021-08-26T22:00:14.252"): @@ -668,7 +748,10 @@ def test_personal_api_key_does_not_update_last_used_at_within_the_hour(self): personal_api_key = generate_random_token_personal() PersonalAPIKey.objects.create( - label="X", user=self.user, last_used_at="2021-08-25T21:09:14", secure_value=hash_key_value(personal_api_key) + label="X", + user=self.user, + last_used_at="2021-08-25T21:09:14", + secure_value=hash_key_value(personal_api_key), ) with freeze_time("2021-08-25T21:14:14.252"): @@ -687,7 +770,10 @@ def 
test_personal_api_key_does_not_update_last_used_at_when_in_the_past(self): personal_api_key = generate_random_token_personal() PersonalAPIKey.objects.create( - label="X", user=self.user, last_used_at="2021-08-25T21:09:14", secure_value=hash_key_value(personal_api_key) + label="X", + user=self.user, + last_used_at="2021-08-25T21:09:14", + secure_value=hash_key_value(personal_api_key), ) with freeze_time("2021-08-24T21:14:14.252"): diff --git a/posthog/api/test/test_capture.py b/posthog/api/test/test_capture.py index fae47f35cbd8b..d3e53b44375f1 100644 --- a/posthog/api/test/test_capture.py +++ b/posthog/api/test/test_capture.py @@ -50,7 +50,10 @@ def mocked_get_ingest_context_from_token(_: Any) -> None: raise Exception("test exception") -parser = ResolvingParser(url=str(pathlib.Path(__file__).parent / "../../../openapi/capture.yaml"), strict=True) +parser = ResolvingParser( + url=str(pathlib.Path(__file__).parent / "../../../openapi/capture.yaml"), + strict=True, +) openapi_spec = cast(Dict[str, Any], parser.specification) @@ -122,7 +125,11 @@ def _send_session_recording_event( } self.client.post( - "/s/", data={"data": json.dumps([event for _ in range(number_of_events)]), "api_key": self.team.api_token} + "/s/", + data={ + "data": json.dumps([event for _ in range(number_of_events)]), + "api_key": self.team.api_token, + }, ) return event @@ -149,7 +156,10 @@ def test_capture_randomly_partitions_with_likely_anonymous_ids(self, kafka_produ }, } with self.assertNumQueries(0): # Capture does not hit PG anymore - self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost") + self.client.get( + "/e/?data=%s" % quote(self._to_json(data)), + HTTP_ORIGIN="https://localhost", + ) kafka_produce.assert_called_with(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, data=ANY, key=None) @@ -174,7 +184,10 @@ def test_cached_is_randomly_partitioned(self): # First time we see this key it's looked up in local config. 
# The bucket has capacity to serve 1 requests/key, so we are not immediately returning. # Since local config is empty and bucket has capacity, this should not override. - with self.settings(EVENT_PARTITION_KEYS_TO_OVERRIDE=[], PARTITION_KEY_AUTOMATIC_OVERRIDE_ENABLED=True): + with self.settings( + EVENT_PARTITION_KEYS_TO_OVERRIDE=[], + PARTITION_KEY_AUTOMATIC_OVERRIDE_ENABLED=True, + ): assert capture.is_randomly_partitioned(partition_key) is False assert limiter._storage._buckets[partition_key][0] == 0 @@ -186,7 +199,10 @@ def test_cached_is_randomly_partitioned(self): with freeze_time(start + timedelta(seconds=1)): # Now we have let one second pass so the bucket must have capacity to serve the request. # We once again look at the local configuration, which is empty. - with self.settings(EVENT_PARTITION_KEYS_TO_OVERRIDE=[], PARTITION_KEY_AUTOMATIC_OVERRIDE_ENABLED=True): + with self.settings( + EVENT_PARTITION_KEYS_TO_OVERRIDE=[], + PARTITION_KEY_AUTOMATIC_OVERRIDE_ENABLED=True, + ): assert capture.is_randomly_partitioned(partition_key) is False @patch("posthog.kafka_client.client._KafkaProducer.produce") @@ -197,13 +213,26 @@ def test_capture_event(self, kafka_produce): "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } with self.assertNumQueries(0): # Capture does not hit PG anymore - response = self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost") + response = self.client.get( + "/e/?data=%s" % quote(self._to_json(data)), + HTTP_ORIGIN="https://localhost", + ) self.assertEqual(response.get("access-control-allow-origin"), "https://localhost") 
self.assertDictContainsSubset( @@ -231,13 +260,26 @@ def test_capture_event_shortcircuits(self, kafka_produce, patch_axes): "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } with self.assertNumQueries(0): - response = self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost") + response = self.client.get( + "/e/?data=%s" % quote(self._to_json(data)), + HTTP_ORIGIN="https://localhost", + ) self.assertEqual(response.get("access-control-allow-origin"), "https://localhost") self.assertDictContainsSubset( { @@ -287,8 +329,18 @@ def test_capture_events_503_on_kafka_produce_errors(self, kafka_produce): "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } @@ -298,10 +350,15 @@ def test_capture_events_503_on_kafka_produce_errors(self, kafka_produce): @patch("posthog.kafka_client.client._KafkaProducer.produce") def test_capture_event_ip(self, kafka_produce): - data = {"event": "some_event", "properties": {"distinct_id": 2, "token": self.team.api_token}} + data = { + "event": "some_event", + "properties": {"distinct_id": 2, "token": self.team.api_token}, + } self.client.get( - "/e/?data=%s" % quote(self._to_json(data)), HTTP_X_FORWARDED_FOR="1.2.3.4", HTTP_ORIGIN="https://localhost" + "/e/?data=%s" % quote(self._to_json(data)), + 
HTTP_X_FORWARDED_FOR="1.2.3.4", + HTTP_ORIGIN="https://localhost", ) self.assertDictContainsSubset( { @@ -316,7 +373,10 @@ def test_capture_event_ip(self, kafka_produce): @patch("posthog.kafka_client.client._KafkaProducer.produce") def test_capture_event_ipv6(self, kafka_produce): - data = {"event": "some_event", "properties": {"distinct_id": 2, "token": self.team.api_token}} + data = { + "event": "some_event", + "properties": {"distinct_id": 2, "token": self.team.api_token}, + } self.client.get( "/e/?data=%s" % quote(self._to_json(data)), @@ -337,7 +397,10 @@ def test_capture_event_ipv6(self, kafka_produce): # Regression test as Azure Gateway forwards ipv4 ips with a port number @patch("posthog.kafka_client.client._KafkaProducer.produce") def test_capture_event_ip_with_port(self, kafka_produce): - data = {"event": "some_event", "properties": {"distinct_id": 2, "token": self.team.api_token}} + data = { + "event": "some_event", + "properties": {"distinct_id": 2, "token": self.team.api_token}, + } self.client.get( "/e/?data=%s" % quote(self._to_json(data)), @@ -368,13 +431,26 @@ def test_capture_event_adds_library_to_sentry(self, patched_scope): "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } with freeze_time(timezone.now()): - self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost") + self.client.get( + "/e/?data=%s" % quote(self._to_json(data)), + HTTP_ORIGIN="https://localhost", + ) mock_set_tag.assert_has_calls([call("library", "web"), call("library.version", "1.14.1")]) @@ -389,13 +465,26 @@ def test_capture_event_adds_unknown_to_sentry_when_no_properties_sent(self, 
patc "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } with freeze_time(timezone.now()): - self.client.get("/e/?data=%s" % quote(self._to_json(data)), HTTP_ORIGIN="https://localhost") + self.client.get( + "/e/?data=%s" % quote(self._to_json(data)), + HTTP_ORIGIN="https://localhost", + ) mock_set_tag.assert_has_calls([call("library", "unknown"), call("library.version", "unknown")]) @@ -406,8 +495,20 @@ def test_multiple_events(self, kafka_produce): data={ "data": json.dumps( [ - {"event": "beep", "properties": {"distinct_id": "eeee", "token": self.team.api_token}}, - {"event": "boop", "properties": {"distinct_id": "aaaa", "token": self.team.api_token}}, + { + "event": "beep", + "properties": { + "distinct_id": "eeee", + "token": self.team.api_token, + }, + }, + { + "event": "boop", + "properties": { + "distinct_id": "aaaa", + "token": self.team.api_token, + }, + }, ] ), "api_key": self.team.api_token, @@ -427,9 +528,18 @@ def test_drops_performance_events(self, kafka_produce): [ { "event": "$performance_event", - "properties": {"distinct_id": "eeee", "token": self.team.api_token}, + "properties": { + "distinct_id": "eeee", + "token": self.team.api_token, + }, + }, + { + "event": "boop", + "properties": { + "distinct_id": "aaaa", + "token": self.team.api_token, + }, }, - {"event": "boop", "properties": {"distinct_id": "aaaa", "token": self.team.api_token}}, ] ), "api_key": self.team.api_token, @@ -535,7 +645,9 @@ def test_invalid_gzip(self, kafka_produce): self.team.save() response = self.client.post( - "/track?compression=gzip", data=b"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03", content_type="text/plain" + 
"/track?compression=gzip", + data=b"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\x03", + content_type="text/plain", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -559,7 +671,8 @@ def test_invalid_lz64(self, kafka_produce): self.assertEqual( response.json(), self.validation_error_response( - "Malformed request data: Failed to decompress data.", code="invalid_payload" + "Malformed request data: Failed to decompress data.", + code="invalid_payload", ), ) self.assertEqual(kafka_produce.call_count, 0) @@ -582,7 +695,11 @@ def test_empty_request_returns_an_error(self, kafka_produce): """ # Empty GET - response = self.client.get("/e/?data=", content_type="application/json", HTTP_ORIGIN="https://localhost") + response = self.client.get( + "/e/?data=", + content_type="application/json", + HTTP_ORIGIN="https://localhost", + ) self.assertEqual(response.status_code, 400) self.assertEqual(kafka_produce.call_count, 0) @@ -595,7 +712,9 @@ def test_empty_request_returns_an_error(self, kafka_produce): def test_batch(self, kafka_produce): data = {"type": "capture", "event": "user signed up", "distinct_id": "2"} self.client.post( - "/batch/", data={"api_key": self.team.api_token, "batch": [data]}, content_type="application/json" + "/batch/", + data={"api_key": self.team.api_token, "batch": [data]}, + content_type="application/json", ) arguments = self._to_arguments(kafka_produce) arguments.pop("now") # can't compare fakedate @@ -622,14 +741,17 @@ def test_batch_with_invalid_event(self, kafka_produce): {"type": "capture", "event": "event5", "distinct_id": "2"}, ] response = self.client.post( - "/batch/", data={"api_key": self.team.api_token, "batch": data}, content_type="application/json" + "/batch/", + data={"api_key": self.team.api_token, "batch": data}, + content_type="application/json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: All events must have 
the event field "distinct_id"!', code="invalid_payload" + 'Invalid payload: All events must have the event field "distinct_id"!', + code="invalid_payload", ), ) self.assertEqual(kafka_produce.call_count, 0) @@ -656,14 +778,17 @@ def test_batch_with_dumped_json_data(self, kafka_produce): """ data = json.dumps([{"event": "$groupidentify", "distinct_id": "2", "properties": {}}]) response = self.client.post( - "/batch/", data={"api_key": self.team.api_token, "batch": data}, content_type="application/json" + "/batch/", + data={"api_key": self.team.api_token, "batch": data}, + content_type="application/json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), self.validation_error_response( - "Invalid payload: All events must be dictionaries not 'str'!", code="invalid_payload" + "Invalid payload: All events must be dictionaries not 'str'!", + code="invalid_payload", ), ) self.assertEqual(kafka_produce.call_count, 0) @@ -786,7 +911,13 @@ def test_batch_incorrect_token_shape(self): "/batch/", data={ "api_key": {"some": "object"}, - "batch": [{"type": "capture", "event": "user signed up", "distinct_id": "whatever"}], + "batch": [ + { + "type": "capture", + "event": "user signed up", + "distinct_id": "whatever", + } + ], }, content_type="application/json", ) @@ -803,7 +934,15 @@ def test_batch_incorrect_token_shape(self): def test_batch_token_not_set(self): response = self.client.post( "/batch/", - data={"batch": [{"type": "capture", "event": "user signed up", "distinct_id": "whatever"}]}, + data={ + "batch": [ + { + "type": "capture", + "event": "user signed up", + "distinct_id": "whatever", + } + ] + }, content_type="application/json", ) @@ -822,7 +961,10 @@ def test_batch_token_not_set(self): def test_batch_distinct_id_not_set(self, statsd_incr): response = self.client.post( "/batch/", - data={"api_key": self.team.api_token, "batch": [{"type": "capture", "event": "user signed up"}]}, + data={ + "api_key": 
self.team.api_token, + "batch": [{"type": "capture", "event": "user signed up"}], + }, content_type="application/json", ) @@ -830,7 +972,8 @@ def test_batch_distinct_id_not_set(self, statsd_incr): self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: All events must have the event field "distinct_id"!', code="invalid_payload" + 'Invalid payload: All events must have the event field "distinct_id"!', + code="invalid_payload", ), ) @@ -893,11 +1036,18 @@ def test_python_library(self, kafka_produce): def test_base64_decode_variations(self, kafka_produce): base64 = "eyJldmVudCI6IiRwYWdldmlldyIsInByb3BlcnRpZXMiOnsiZGlzdGluY3RfaWQiOiJlZWVlZWVlZ8+lZWVlZWUifX0=" dict = self._dict_from_b64(base64) - self.assertDictEqual(dict, {"event": "$pageview", "properties": {"distinct_id": "eeeeeeegϥeeeee"}}) + self.assertDictEqual( + dict, + {"event": "$pageview", "properties": {"distinct_id": "eeeeeeegϥeeeee"}}, + ) # POST with "+" in the base64 self.client.post( - "/track/", data={"data": base64, "api_key": self.team.api_token} # main difference in this test + "/track/", + data={ + "data": base64, + "api_key": self.team.api_token, + }, # main difference in this test ) arguments = self._to_arguments(kafka_produce) self.assertEqual(arguments["token"], self.team.api_token) @@ -906,7 +1056,10 @@ def test_base64_decode_variations(self, kafka_produce): # POST with " " in the base64 instead of the "+" self.client.post( "/track/", - data={"data": base64.replace("+", " "), "api_key": self.team.api_token}, # main difference in this test + data={ + "data": base64.replace("+", " "), + "api_key": self.team.api_token, + }, # main difference in this test ) arguments = self._to_arguments(kafka_produce) self.assertEqual(arguments["token"], self.team.api_token) @@ -972,7 +1125,11 @@ def test_sent_at_field(self, kafka_produce): data={ "sent_at": tomorrow_sent_at.isoformat(), "data": self._dict_to_b64( - {"event": "$pageview", "timestamp": tomorrow.isoformat(), 
"properties": {"distinct_id": "eeee"}} + { + "event": "$pageview", + "timestamp": tomorrow.isoformat(), + "properties": {"distinct_id": "eeee"}, + } ), "api_key": self.team.api_token, # main difference in this test }, @@ -987,7 +1144,9 @@ def test_sent_at_field(self, kafka_produce): def test_incorrect_json(self): response = self.client.post( - "/capture/", '{"event": "incorrect json with trailing comma",}', content_type="application/json" + "/capture/", + '{"event": "incorrect json with trailing comma",}', + content_type="application/json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -1014,7 +1173,8 @@ def test_distinct_id_nan(self, statsd_incr): self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: Event field "distinct_id" should not be blank!', code="invalid_payload" + 'Invalid payload: Event field "distinct_id" should not be blank!', + code="invalid_payload", ), ) @@ -1029,7 +1189,12 @@ def test_distinct_id_nan(self, statsd_incr): def test_distinct_id_set_but_null(self, statsd_incr): response = self.client.post( "/e/", - data={"api_key": self.team.api_token, "type": "capture", "event": "user signed up", "distinct_id": None}, + data={ + "api_key": self.team.api_token, + "type": "capture", + "event": "user signed up", + "distinct_id": None, + }, content_type="application/json", ) @@ -1037,7 +1202,8 @@ def test_distinct_id_set_but_null(self, statsd_incr): self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: Event field "distinct_id" should not be blank!', code="invalid_payload" + 'Invalid payload: Event field "distinct_id" should not be blank!', + code="invalid_payload", ), ) @@ -1052,7 +1218,12 @@ def test_distinct_id_set_but_null(self, statsd_incr): def test_event_name_missing(self, statsd_incr): response = self.client.post( "/e/", - data={"api_key": self.team.api_token, "type": "capture", "event": "", "distinct_id": "a valid id"}, + data={ + 
"api_key": self.team.api_token, + "type": "capture", + "event": "", + "distinct_id": "a valid id", + }, content_type="application/json", ) @@ -1072,7 +1243,12 @@ def test_custom_uuid(self, kafka_produce) -> None: uuid = "01823e89-f75d-0000-0d4d-3d43e54f6de5" response = self.client.post( "/e/", - data={"api_key": self.team.api_token, "event": "some_event", "distinct_id": "1", "uuid": uuid}, + data={ + "api_key": self.team.api_token, + "event": "some_event", + "distinct_id": "1", + "uuid": uuid, + }, content_type="application/json", ) @@ -1085,7 +1261,12 @@ def test_custom_uuid(self, kafka_produce) -> None: def test_custom_uuid_invalid(self, statsd_incr) -> None: response = self.client.post( "/e/", - data={"api_key": self.team.api_token, "event": "some_event", "distinct_id": "1", "uuid": "invalid_uuid"}, + data={ + "api_key": self.team.api_token, + "event": "some_event", + "distinct_id": "1", + "uuid": "invalid_uuid", + }, content_type="application/json", ) @@ -1093,7 +1274,8 @@ def test_custom_uuid_invalid(self, statsd_incr) -> None: self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: Event field "uuid" is not a valid UUID!', code="invalid_payload" + 'Invalid payload: Event field "uuid" is not a valid UUID!', + code="invalid_payload", ), ) @@ -1106,28 +1288,38 @@ def test_custom_uuid_invalid(self, statsd_incr) -> None: def test_handle_lacking_event_name_field(self): response = self.client.post( "/e/", - data={"distinct_id": "abc", "properties": {"cost": 2}, "api_key": self.team.api_token}, + data={ + "distinct_id": "abc", + "properties": {"cost": 2}, + "api_key": self.team.api_token, + }, content_type="application/json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: All events must have the event name field "event"!', code="invalid_payload" + 'Invalid payload: All events must have the event name field "event"!', + 
code="invalid_payload", ), ) def test_handle_invalid_snapshot(self): response = self.client.post( "/e/", - data={"event": "$snapshot", "distinct_id": "abc", "api_key": self.team.api_token}, + data={ + "event": "$snapshot", + "distinct_id": "abc", + "api_key": self.team.api_token, + }, content_type="application/json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), self.validation_error_response( - 'Invalid payload: $snapshot events must contain property "$snapshot_data"!', code="invalid_payload" + 'Invalid payload: $snapshot events must contain property "$snapshot_data"!', + code="invalid_payload", ), ) @@ -1202,7 +1394,10 @@ def test_legacy_recording_ingestion_compression_and_transformation(self, kafka_p event_data=event_data, ) self.assertEqual(kafka_produce.call_count, 1) - self.assertEqual(kafka_produce.call_args_list[0][1]["topic"], KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS) + self.assertEqual( + kafka_produce.call_args_list[0][1]["topic"], + KAFKA_SESSION_RECORDING_SNAPSHOT_ITEM_EVENTS, + ) key = kafka_produce.call_args_list[0][1]["key"] self.assertEqual(key, session_id) data_sent_to_kafka = json.loads(kafka_produce.call_args_list[0][1]["data"]["data"]) @@ -1252,7 +1447,10 @@ def test_create_session_recording_kafka_with_expected_hosts( with self.settings( KAFKA_HOSTS=["first.server:9092", "second.server:9092"], KAFKA_SECURITY_PROTOCOL="SASL_SSL", - SESSION_RECORDING_KAFKA_HOSTS=["another-server:9092", "a-fourth.server:9092"], + SESSION_RECORDING_KAFKA_HOSTS=[ + "another-server:9092", + "a-fourth.server:9092", + ], SESSION_RECORDING_KAFKA_SECURITY_PROTOCOL="SSL", SESSION_RECORDING_KAFKA_MAX_REQUEST_SIZE_BYTES=1234, ): @@ -1283,7 +1481,10 @@ def test_can_redirect_session_recordings_to_alternative_kafka( ) -> None: with self.settings( KAFKA_HOSTS=["first.server:9092", "second.server:9092"], - SESSION_RECORDING_KAFKA_HOSTS=["another-server:9092", "a-fourth.server:9092"], + SESSION_RECORDING_KAFKA_HOSTS=[ 
+ "another-server:9092", + "a-fourth.server:9092", + ], ): default_kafka_producer_mock.return_value = KafkaProducer() session_recording_producer_factory_mock.return_value = sessionRecordingKafkaProducer() @@ -1353,8 +1554,14 @@ def test_capture_event_can_override_attributes_important_in_replicator_exports(s def test_quota_limits_ignored_if_disabled(self, kafka_produce) -> None: from ee.billing.quota_limiting import QuotaResource, replace_limited_team_tokens - replace_limited_team_tokens(QuotaResource.RECORDINGS, {self.team.api_token: timezone.now().timestamp() + 10000}) - replace_limited_team_tokens(QuotaResource.EVENTS, {self.team.api_token: timezone.now().timestamp() + 10000}) + replace_limited_team_tokens( + QuotaResource.RECORDINGS, + {self.team.api_token: timezone.now().timestamp() + 10000}, + ) + replace_limited_team_tokens( + QuotaResource.EVENTS, + {self.team.api_token: timezone.now().timestamp() + 10000}, + ) self._send_session_recording_event() self.assertEqual(kafka_produce.call_count, 1) @@ -1371,8 +1578,20 @@ def _produce_events(): data={ "data": json.dumps( [ - {"event": "beep", "properties": {"distinct_id": "eeee", "token": self.team.api_token}}, - {"event": "boop", "properties": {"distinct_id": "aaaa", "token": self.team.api_token}}, + { + "event": "beep", + "properties": { + "distinct_id": "eeee", + "token": self.team.api_token, + }, + }, + { + "event": "boop", + "properties": { + "distinct_id": "aaaa", + "token": self.team.api_token, + }, + }, ] ), "api_key": self.team.api_token, @@ -1390,20 +1609,28 @@ def _produce_events(): ], ) - replace_limited_team_tokens(QuotaResource.EVENTS, {self.team.api_token: timezone.now().timestamp() + 10000}) + replace_limited_team_tokens( + QuotaResource.EVENTS, + {self.team.api_token: timezone.now().timestamp() + 10000}, + ) _produce_events() self.assertEqual(kafka_produce.call_count, 1) # Only the recording event replace_limited_team_tokens( - QuotaResource.RECORDINGS, {self.team.api_token: 
timezone.now().timestamp() + 10000} + QuotaResource.RECORDINGS, + {self.team.api_token: timezone.now().timestamp() + 10000}, ) _produce_events() self.assertEqual(kafka_produce.call_count, 0) # No events replace_limited_team_tokens( - QuotaResource.RECORDINGS, {self.team.api_token: timezone.now().timestamp() - 10000} + QuotaResource.RECORDINGS, + {self.team.api_token: timezone.now().timestamp() - 10000}, + ) + replace_limited_team_tokens( + QuotaResource.EVENTS, + {self.team.api_token: timezone.now().timestamp() - 10000}, ) - replace_limited_team_tokens(QuotaResource.EVENTS, {self.team.api_token: timezone.now().timestamp() - 10000}) _produce_events() self.assertEqual(kafka_produce.call_count, 3) # All events as limit-until timestamp is in the past @@ -1424,8 +1651,18 @@ def test_capture_historical_analytics_events(self, kafka_produce) -> None: "distinct_id": 2, "token": self.team.api_token, "$elements": [ - {"tag_name": "a", "nth_child": 1, "nth_of_type": 2, "attr__class": "btn btn-sm"}, - {"tag_name": "div", "nth_child": 1, "nth_of_type": 2, "$el_text": "💻"}, + { + "tag_name": "a", + "nth_child": 1, + "nth_of_type": 2, + "attr__class": "btn btn-sm", + }, + { + "tag_name": "div", + "nth_child": 1, + "nth_of_type": 2, + "$el_text": "💻", + }, ], }, } @@ -1433,4 +1670,7 @@ def test_capture_historical_analytics_events(self, kafka_produce) -> None: }, ) self.assertEqual(kafka_produce.call_count, 1) - self.assertEqual(kafka_produce.call_args_list[0][1]["topic"], KAFKA_EVENTS_PLUGIN_INGESTION_HISTORICAL) + self.assertEqual( + kafka_produce.call_args_list[0][1]["topic"], + KAFKA_EVENTS_PLUGIN_INGESTION_HISTORICAL, + ) diff --git a/posthog/api/test/test_cohort.py b/posthog/api/test/test_cohort.py index 78fc35ac64d6b..2d9082ecb5cb0 100644 --- a/posthog/api/test/test_cohort.py +++ b/posthog/api/test/test_cohort.py @@ -58,7 +58,12 @@ def test_creating_update_and_calculating(self, patch_calculate_cohort, patch_cap { "filters": { "type": "OR", - "values": [{"type": "AND", 
"values": [{"key": "team_id", "value": 5, "type": "person"}]}], + "values": [ + { + "type": "AND", + "values": [{"key": "team_id", "value": 5, "type": "person"}], + } + ], }, "name_length": 8, "groups_count": 1, @@ -91,7 +96,12 @@ def test_creating_update_and_calculating(self, patch_calculate_cohort, patch_cap { "filters": { "type": "OR", - "values": [{"type": "AND", "values": [{"key": "team_id", "value": 6, "type": "person"}]}], + "values": [ + { + "type": "AND", + "values": [{"key": "team_id", "value": 6, "type": "person"}], + } + ], }, "name_length": 9, "groups_count": 1, @@ -193,7 +203,9 @@ def test_static_cohort_csv_upload(self, patch_calculate_cohort_from_list): # Only change name without updating CSV response = client.patch( - f"/api/projects/{self.team.id}/cohorts/{response.json()['id']}", {"name": "test2"}, format="multipart" + f"/api/projects/{self.team.id}/cohorts/{response.json()['id']}", + {"name": "test2"}, + format="multipart", ) self.assertEqual(response.status_code, 200) @@ -235,7 +247,10 @@ def test_static_cohort_to_dynamic_cohort(self, patch_calculate_cohort, patch_cal response = self.client.patch( f"/api/projects/{self.team.id}/cohorts/{response.json()['id']}", - {"is_static": False, "groups": [{"properties": [{"key": "email", "value": "email@example.org"}]}]}, + { + "is_static": False, + "groups": [{"properties": [{"key": "email", "value": "email@example.org"}]}], + }, ) self.assertEqual(response.status_code, 200) self.assertEqual(patch_calculate_cohort.call_count, 1) @@ -247,7 +262,8 @@ def test_cohort_list(self): Person.objects.create(team=self.team, properties={"prop": 6}) self.client.post( - f"/api/projects/{self.team.id}/cohorts", data={"name": "whatever", "groups": [{"properties": {"prop": 5}}]} + f"/api/projects/{self.team.id}/cohorts", + data={"name": "whatever", "groups": [{"properties": {"prop": 5}}]}, ) response = self.client.get(f"/api/projects/{self.team.id}/cohorts").json() @@ -284,11 +300,16 @@ def test_csv_export_new(self): def 
test_filter_by_cohort(self): _create_person(team=self.team, distinct_ids=[f"fake"], properties={}) for i in range(150): - _create_person(team=self.team, distinct_ids=[f"person_{i}"], properties={"$os": "Chrome"}) + _create_person( + team=self.team, + distinct_ids=[f"person_{i}"], + properties={"$os": "Chrome"}, + ) flush_persons_and_events() cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) cohort.calculate_people_ch(pending_version=0) @@ -300,12 +321,21 @@ def test_filter_by_cohort(self): def test_filter_by_cohort_prop(self): for i in range(5): - _create_person(team=self.team, distinct_ids=[f"person_{i}"], properties={"$os": "Chrome"}) + _create_person( + team=self.team, + distinct_ids=[f"person_{i}"], + properties={"$os": "Chrome"}, + ) - _create_person(team=self.team, distinct_ids=[f"target"], properties={"$os": "Chrome", "$browser": "Safari"}) + _create_person( + team=self.team, + distinct_ids=[f"target"], + properties={"$os": "Chrome", "$browser": "Safari"}, + ) cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) cohort.calculate_people_ch(pending_version=0) @@ -319,15 +349,26 @@ def test_filter_by_cohort_prop(self): @patch("posthog.api.person.posthoganalytics.feature_enabled", return_value=True) def test_filter_by_cohort_prop_from_clickhouse(self, patch_feature_enabled): for i in range(5): - _create_person(team=self.team, distinct_ids=[f"person_{i}"], properties={"$os": "Chrome"}) + _create_person( + team=self.team, + distinct_ids=[f"person_{i}"], + properties={"$os": "Chrome"}, + ) - _create_person(team=self.team, distinct_ids=[f"target"], properties={"$os": "Chrome", "$browser": "Safari"}) 
_create_person( - team=self.team, distinct_ids=[f"not_target"], properties={"$os": "Something else", "$browser": "Safari"} + team=self.team, + distinct_ids=[f"target"], + properties={"$os": "Chrome", "$browser": "Safari"}, + ) + _create_person( + team=self.team, + distinct_ids=[f"not_target"], + properties={"$os": "Something else", "$browser": "Safari"}, ) cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) cohort.calculate_people_ch(pending_version=0) @@ -339,13 +380,22 @@ def test_filter_by_cohort_prop_from_clickhouse(self, patch_feature_enabled): def test_filter_by_cohort_search(self): for i in range(5): - _create_person(team=self.team, distinct_ids=[f"person_{i}"], properties={"$os": "Chrome"}) + _create_person( + team=self.team, + distinct_ids=[f"person_{i}"], + properties={"$os": "Chrome"}, + ) - _create_person(team=self.team, distinct_ids=[f"target"], properties={"$os": "Chrome", "$browser": "Safari"}) + _create_person( + team=self.team, + distinct_ids=[f"target"], + properties={"$os": "Chrome", "$browser": "Safari"}, + ) flush_persons_and_events() cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) cohort.calculate_people_ch(pending_version=0) @@ -381,7 +431,17 @@ def test_creating_update_and_calculating_with_cycle(self, patch_calculate_cohort f"/api/projects/{self.team.id}/cohorts", data={ "name": "cohort B", - "groups": [{"properties": [{"type": "cohort", "value": response_a.json()["id"], "key": "id"}]}], + "groups": [ + { + "properties": [ + { + "type": "cohort", + "value": response_a.json()["id"], + "key": "id", + } + ] + } + ], }, ) self.assertEqual(patch_calculate_cohort.call_count, 2) @@ -391,7 
+451,17 @@ def test_creating_update_and_calculating_with_cycle(self, patch_calculate_cohort f"/api/projects/{self.team.id}/cohorts", data={ "name": "cohort C", - "groups": [{"properties": [{"type": "cohort", "value": response_b.json()["id"], "key": "id"}]}], + "groups": [ + { + "properties": [ + { + "type": "cohort", + "value": response_b.json()["id"], + "key": "id", + } + ] + } + ], }, ) self.assertEqual(patch_calculate_cohort.call_count, 3) @@ -401,12 +471,26 @@ def test_creating_update_and_calculating_with_cycle(self, patch_calculate_cohort f"/api/projects/{self.team.id}/cohorts/{response_a.json()['id']}", data={ "name": "Cohort A, reloaded", - "groups": [{"properties": [{"type": "cohort", "value": response_c.json()["id"], "key": "id"}]}], + "groups": [ + { + "properties": [ + { + "type": "cohort", + "value": response_c.json()["id"], + "key": "id", + } + ] + } + ], }, ) self.assertEqual(response.status_code, 400, response.content) self.assertDictContainsSubset( - {"detail": "Cohorts cannot reference other cohorts in a loop.", "type": "validation_error"}, response.json() + { + "detail": "Cohorts cannot reference other cohorts in a loop.", + "type": "validation_error", + }, + response.json(), ) self.assertEqual(patch_calculate_cohort.call_count, 3) @@ -415,12 +499,26 @@ def test_creating_update_and_calculating_with_cycle(self, patch_calculate_cohort f"/api/projects/{self.team.id}/cohorts/{response_a.json()['id']}", data={ "name": "Cohort A, reloaded", - "groups": [{"properties": [{"type": "cohort", "value": response_a.json()["id"], "key": "id"}]}], + "groups": [ + { + "properties": [ + { + "type": "cohort", + "value": response_a.json()["id"], + "key": "id", + } + ] + } + ], }, ) self.assertEqual(response.status_code, 400, response.content) self.assertDictContainsSubset( - {"detail": "Cohorts cannot reference other cohorts in a loop.", "type": "validation_error"}, response.json() + { + "detail": "Cohorts cannot reference other cohorts in a loop.", + "type": 
"validation_error", + }, + response.json(), ) self.assertEqual(patch_calculate_cohort.call_count, 3) @@ -439,7 +537,17 @@ def test_creating_update_with_non_directed_cycle(self, patch_calculate_cohort, p f"/api/projects/{self.team.id}/cohorts", data={ "name": "cohort B", - "groups": [{"properties": [{"type": "cohort", "value": response_a.json()["id"], "key": "id"}]}], + "groups": [ + { + "properties": [ + { + "type": "cohort", + "value": response_a.json()["id"], + "key": "id", + } + ] + } + ], }, ) self.assertEqual(patch_calculate_cohort.call_count, 2) @@ -452,8 +560,16 @@ def test_creating_update_with_non_directed_cycle(self, patch_calculate_cohort, p "groups": [ { "properties": [ - {"type": "cohort", "value": response_b.json()["id"], "key": "id"}, - {"type": "cohort", "value": response_a.json()["id"], "key": "id"}, + { + "type": "cohort", + "value": response_b.json()["id"], + "key": "id", + }, + { + "type": "cohort", + "value": response_a.json()["id"], + "key": "id", + }, ] } ], @@ -492,25 +608,47 @@ def test_creating_update_and_calculating_with_invalid_cohort(self, patch_calcula ) self.assertEqual(response.status_code, 400, response.content) self.assertDictContainsSubset( - {"detail": "Invalid Cohort ID in filter", "type": "validation_error"}, response.json() + {"detail": "Invalid Cohort ID in filter", "type": "validation_error"}, + response.json(), ) self.assertEqual(patch_calculate_cohort.call_count, 1) @patch("posthog.api.cohort.report_user_action") def test_creating_update_and_calculating_with_new_cohort_filters(self, patch_capture): - _create_person(distinct_ids=["p1"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["p1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p1", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp=datetime.now() - 
timedelta(hours=12), ) - _create_person(distinct_ids=["p2"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p2"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p2", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p3"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p3"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p3", timestamp=datetime.now() - timedelta(days=12) + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp=datetime.now() - timedelta(days=12), ) flush_persons_and_events() @@ -523,7 +661,11 @@ def test_creating_update_and_calculating_with_new_cohort_filters(self, patch_cap "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, { "key": "$pageview", "event_type": "events", @@ -552,9 +694,21 @@ def test_creating_update_and_calculating_with_new_cohort_filters(self, patch_cap def test_cohort_with_is_set_filter_missing_value(self, patch_capture): # regression test: Removing `value` was silently failing - _create_person(distinct_ids=["p1"], team_id=self.team.pk, properties={"$some_prop": "something"}) - _create_person(distinct_ids=["p2"], team_id=self.team.pk, properties={"$some_prop": "not it"}) - _create_person(distinct_ids=["p3"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) + _create_person( + distinct_ids=["p2"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) + 
_create_person( + distinct_ids=["p3"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_person(distinct_ids=["p4"], team_id=self.team.pk, properties={}) flush_persons_and_events() @@ -565,7 +719,13 @@ def test_cohort_with_is_set_filter_missing_value(self, patch_capture): "filters": { "properties": { "type": "OR", - "values": [{"key": "$some_prop", "type": "person", "operator": "is_set"}], + "values": [ + { + "key": "$some_prop", + "type": "person", + "operator": "is_set", + } + ], } }, }, @@ -597,12 +757,19 @@ def test_creating_update_and_calculating_ignore_bad_filters(self, patch_calculat update_response = self.client.patch( f"/api/projects/{self.team.id}/cohorts/{response.json()['id']}", - data={"name": "whatever", "filters": "[Slkasd=lkxcn]", "groups": [{"properties": {"team_id": 5}}]}, + data={ + "name": "whatever", + "filters": "[Slkasd=lkxcn]", + "groups": [{"properties": {"team_id": 5}}], + }, ) self.assertEqual(update_response.status_code, 400, response.content) self.assertDictContainsSubset( - {"detail": "Filters must be a dictionary with a 'properties' key.", "type": "validation_error"}, + { + "detail": "Filters must be a dictionary with a 'properties' key.", + "type": "validation_error", + }, update_response.json(), ) @@ -632,7 +799,11 @@ def test_update_cohort_used_in_flags(self, patch_calculate_cohort, patch_capture "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, ], } }, @@ -679,7 +850,11 @@ def test_update_cohort_used_in_flags(self, patch_calculate_cohort, patch_capture "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, { "key": "$pageview", "event_type": "events", @@ -712,7 +887,11 @@ def test_update_cohort_used_in_flags(self, patch_calculate_cohort, 
patch_capture "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, { "key": "id", "value": second_cohort_pk, @@ -736,19 +915,40 @@ def test_update_cohort_used_in_flags(self, patch_calculate_cohort, patch_capture @patch("posthog.api.cohort.report_user_action") def test_duplicating_dynamic_cohort_as_static(self, patch_capture): - _create_person(distinct_ids=["p1"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["p1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p1", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p2"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p2"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p2", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p3"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p3"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p3", timestamp=datetime.now() - timedelta(days=12) + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp=datetime.now() - timedelta(days=12), ) flush_persons_and_events() @@ -761,7 +961,11 @@ def test_duplicating_dynamic_cohort_as_static(self, patch_capture): "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": 
"$some_prop", + "value": "something", + "type": "person", + }, { "key": "$pageview", "event_type": "events", @@ -802,19 +1006,40 @@ def test_duplicating_dynamic_cohort_as_static(self, patch_capture): @snapshot_clickhouse_queries @patch("posthog.api.cohort.report_user_action") def test_async_deletion_of_cohort(self, patch_capture): - _create_person(distinct_ids=["p1"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["p1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p1", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p2"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p2"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p2", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p3"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p3"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p3", timestamp=datetime.now() - timedelta(days=12) + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp=datetime.now() - timedelta(days=12), ) flush_persons_and_events() @@ -827,7 +1052,11 @@ def test_async_deletion_of_cohort(self, patch_capture): "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, { "key": "$pageview", "event_type": "events", @@ -858,7 +1087,11 @@ def 
test_async_deletion_of_cohort(self, patch_capture): "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, ], } }, @@ -891,7 +1124,10 @@ def test_async_deletion_of_cohort(self, patch_capture): sync_execute(f"OPTIMIZE TABLE cohortpeople FINAL SETTINGS mutations_sync = 2") # check clickhouse data is gone from cohortpeople table - res = sync_execute("SELECT count() FROM cohortpeople WHERE cohort_id = %(cohort_id)s", {"cohort_id": cohort_id}) + res = sync_execute( + "SELECT count() FROM cohortpeople WHERE cohort_id = %(cohort_id)s", + {"cohort_id": cohort_id}, + ) self.assertEqual(res[0][0], 1) # now let's ensure verification of deletion happens on next run @@ -929,19 +1165,40 @@ def test_deletion_of_cohort_cancels_async_deletion(self): @patch("posthog.api.cohort.report_user_action") def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, patch_capture): - _create_person(distinct_ids=["p1"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["p1"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p1", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p2"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( + distinct_ids=["p2"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p2", timestamp=datetime.now() - timedelta(hours=12) + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp=datetime.now() - timedelta(hours=12), ) - _create_person(distinct_ids=["p3"], team_id=self.team.pk, properties={"$some_prop": "not it"}) + _create_person( 
+ distinct_ids=["p3"], + team_id=self.team.pk, + properties={"$some_prop": "not it"}, + ) _create_event( - team=self.team, event="$pageview", distinct_id="p3", timestamp=datetime.now() - timedelta(days=12) + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp=datetime.now() - timedelta(days=12), ) flush_persons_and_events() @@ -954,7 +1211,11 @@ def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, pat "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, { "key": "$pageview", "event_type": "events", @@ -985,7 +1246,11 @@ def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, pat "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something", "type": "person"}, + { + "key": "$some_prop", + "value": "something", + "type": "person", + }, ], } }, @@ -1000,7 +1265,11 @@ def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, pat "properties": { "type": "OR", "values": [ - {"key": "$some_prop", "value": "something2", "type": "person"}, + { + "key": "$some_prop", + "value": "something2", + "type": "person", + }, ], } }, @@ -1043,7 +1312,10 @@ def test_async_deletion_of_cohort_with_race_condition_multiple_updates(self, pat # check clickhouse data is gone from cohortpeople table # Without async deletions, this number would've been 5, because of extra random stuff being added to cohortpeople table # due to the racy calls to update cohort - res = sync_execute("SELECT count() FROM cohortpeople WHERE cohort_id = %(cohort_id)s", {"cohort_id": cohort_id}) + res = sync_execute( + "SELECT count() FROM cohortpeople WHERE cohort_id = %(cohort_id)s", + {"cohort_id": cohort_id}, + ) self.assertEqual(res[0][0], 2) # now let's ensure verification of deletion happens on next run diff --git a/posthog/api/test/test_decide.py b/posthog/api/test/test_decide.py 
index e3e13458acb54..78019779fb5f1 100644 --- a/posthog/api/test/test_decide.py +++ b/posthog/api/test/test_decide.py @@ -20,7 +20,15 @@ from posthog.api.test.test_feature_flag import QueryTimeoutWrapper from posthog.api.decide import label_for_team_id_to_track -from posthog.models import FeatureFlag, GroupTypeMapping, Person, PersonalAPIKey, Plugin, PluginConfig, PluginSourceFile +from posthog.models import ( + FeatureFlag, + GroupTypeMapping, + Person, + PersonalAPIKey, + Plugin, + PluginConfig, + PluginSourceFile, +) from posthog.models.cohort.cohort import Cohort from posthog.models.organization import Organization, OrganizationMembership from posthog.models.personal_api_key import hash_key_value @@ -29,12 +37,20 @@ from posthog.models.person import PersonDistinctId from posthog.models.user import User from posthog.models.utils import generate_random_token_personal -from posthog.test.base import BaseTest, QueryMatchingTest, snapshot_postgres_queries, snapshot_postgres_queries_context +from posthog.test.base import ( + BaseTest, + QueryMatchingTest, + snapshot_postgres_queries, + snapshot_postgres_queries_context, +) from posthog.database_healthcheck import postgres_healthcheck from posthog import redis -@patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) +@patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, +) class TestDecide(BaseTest, QueryMatchingTest): """ Tests the `/decide` endpoint. 
@@ -109,7 +125,15 @@ def test_defaults_to_v2_if_conflicting_parameters(self, *args): response = self.client.post( f"/decide/?v=2&v=1.19.0", - {"data": self._dict_to_b64({"token": self.team.api_token, "distinct_id": "example_id", "groups": {}})}, + { + "data": self._dict_to_b64( + { + "token": self.team.api_token, + "distinct_id": "example_id", + "groups": {}, + } + ) + }, HTTP_ORIGIN="http://127.0.0.1:8000", ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -267,7 +291,10 @@ def test_session_recording_empty_linked_flag(self, *args): response = self._post_decide().json() assert response["sessionRecording"]["linkedFlag"] is None - self._update_team({"session_recording_linked_flag": {}}, expected_status_code=status.HTTP_400_BAD_REQUEST) + self._update_team( + {"session_recording_linked_flag": {}}, + expected_status_code=status.HTTP_400_BAD_REQUEST, + ) def test_exception_autocapture_opt_in(self, *args): # :TRICKY: Test for regression around caching @@ -277,7 +304,10 @@ def test_exception_autocapture_opt_in(self, *args): self._update_team({"autocapture_exceptions_opt_in": True}) response = self._post_decide().json() - self.assertEqual(response["autocaptureExceptions"], {"errors_to_ignore": [], "endpoint": "/e/"}) + self.assertEqual( + response["autocaptureExceptions"], + {"errors_to_ignore": [], "endpoint": "/e/"}, + ) def test_exception_autocapture_errors_to_ignore(self, *args): # :TRICKY: Test for regression around caching @@ -286,13 +316,21 @@ def test_exception_autocapture_errors_to_ignore(self, *args): self._update_team({"autocapture_exceptions_opt_in": True}) self._update_team( - {"autocapture_exceptions_errors_to_ignore": ["ResizeObserver loop limit exceeded", ".* bot .*"]} + { + "autocapture_exceptions_errors_to_ignore": [ + "ResizeObserver loop limit exceeded", + ".* bot .*", + ] + } ) response = self._post_decide().json() self.assertEqual( response["autocaptureExceptions"], - {"errors_to_ignore": ["ResizeObserver loop limit exceeded", ".* bot 
.*"], "endpoint": "/e/"}, + { + "errors_to_ignore": ["ResizeObserver loop limit exceeded", ".* bot .*"], + "endpoint": "/e/", + }, ) def test_user_session_recording_opt_in_wildcard_domain(self, *args): @@ -300,7 +338,12 @@ def test_user_session_recording_opt_in_wildcard_domain(self, *args): response = self._post_decide().json() self.assertEqual(response["sessionRecording"], False) - self._update_team({"session_recording_opt_in": True, "recording_domains": ["https://*.example.com"]}) + self._update_team( + { + "session_recording_opt_in": True, + "recording_domains": ["https://*.example.com"], + } + ) response = self._post_decide(origin="https://random.example.com").json() assert response["sessionRecording"] == { @@ -318,7 +361,12 @@ def test_user_session_recording_opt_in_wildcard_domain(self, *args): self.assertEqual(response["sessionRecording"], False) def test_user_session_recording_evil_site(self, *args): - self._update_team({"session_recording_opt_in": True, "recording_domains": ["https://example.com"]}) + self._update_team( + { + "session_recording_opt_in": True, + "recording_domains": ["https://example.com"], + } + ) response = self._post_decide(origin="evil.site.com").json() assert response["sessionRecording"] is False @@ -357,7 +405,12 @@ def test_user_session_recording_allowed_when_no_permitted_domains_are_set(self, } def test_user_session_recording_allowed_when_permitted_domains_are_not_http_based(self, *args): - self._update_team({"session_recording_opt_in": True, "recording_domains": ["capacitor://localhost"]}) + self._update_team( + { + "session_recording_opt_in": True, + "recording_domains": ["capacitor://localhost"], + } + ) response = self._post_decide(origin="capacitor://localhost:8000/home").json() assert response["sessionRecording"] == { @@ -384,7 +437,12 @@ def test_web_app_queries(self, *args): status=PluginSourceFile.Status.TRANSPILED, ) PluginConfig.objects.create( - plugin=plugin, enabled=True, order=1, team=self.team, config={}, 
web_token="tokentoken" + plugin=plugin, + enabled=True, + order=1, + team=self.team, + config={}, + web_token="tokentoken", ) sync_team_inject_web_apps(self.team) @@ -406,7 +464,12 @@ def test_site_app_injection(self, *args): status=PluginSourceFile.Status.TRANSPILED, ) plugin_config = PluginConfig.objects.create( - plugin=plugin, enabled=True, order=1, team=self.team, config={}, web_token="tokentoken" + plugin=plugin, + enabled=True, + order=1, + team=self.team, + config={}, + web_token="tokentoken", ) self.team.refresh_from_db() self.assertTrue(self.team.inject_web_apps) @@ -421,9 +484,17 @@ def test_feature_flags(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) FeatureFlag.objects.create( team=self.team, @@ -444,7 +515,19 @@ def test_feature_flags(self, *args): ) FeatureFlag.objects.create( team=self.team, - filters={"groups": [{"properties": [{"key": "email", "value": "tim@posthog.com", "type": "person"}]}]}, + filters={ + "groups": [ + { + "properties": [ + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + } + ] + } + ] + }, name="Filter by property 2", key="filer-by-property-2", created_by=self.user, @@ -467,14 +550,24 @@ def test_feature_flags_v3_json(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + 
distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( team=self.team, filters={ "groups": [ { - "properties": [{"key": "email", "value": "tim@posthog.com", "type": "person"}], + "properties": [ + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + } + ], "rollout_percentage": None, } ], @@ -489,15 +582,26 @@ def test_feature_flags_v3_json(self, *args): response = self._post_decide(api_version=3) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual({"color": "blue"}, response.json()["featureFlagPayloads"]["filter-by-property"]) + self.assertEqual( + {"color": "blue"}, + response.json()["featureFlagPayloads"]["filter-by-property"], + ) def test_feature_flags_v3_json_multivariate(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) FeatureFlag.objects.create( team=self.team, @@ -512,9 +616,21 @@ def test_feature_flags_v3_json_multivariate(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + 
"key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, "payloads": {"first-variant": {"color": "blue"}}, @@ -535,15 +651,26 @@ def test_feature_flags_v3_json_multivariate(self, *args): response = self._post_decide(api_version=3) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual("first-variant", response.json()["featureFlags"]["multivariate-flag"]) - self.assertEqual({"color": "blue"}, response.json()["featureFlagPayloads"]["multivariate-flag"]) + self.assertEqual( + {"color": "blue"}, + response.json()["featureFlagPayloads"]["multivariate-flag"], + ) def test_feature_flags_v2(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) FeatureFlag.objects.create( team=self.team, @@ -558,9 +685,21 @@ def test_feature_flags_v2(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -596,7 +735,11 @@ def 
test_feature_flags_v2_with_property_overrides(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$geoip_country_name": "India"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"$geoip_country_name": "India"}, + ) Person.objects.create(team=self.team, distinct_ids=["other_id"], properties={}) australia_ip = "13.106.122.3" @@ -610,7 +753,13 @@ def test_feature_flags_v2_with_property_overrides(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "Australia", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "Australia", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -621,15 +770,33 @@ def test_feature_flags_v2_with_property_overrides(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "India", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "India", + "type": "person", + } + ], "rollout_percentage": None, } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -653,7 +820,11 @@ def test_feature_flags_v2_with_geoip_error(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$geoip_country_name": "India"}) + 
Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"$geoip_country_name": "India"}, + ) Person.objects.create(team=self.team, distinct_ids=["other_id"], properties={}) FeatureFlag.objects.create( @@ -665,7 +836,13 @@ def test_feature_flags_v2_with_geoip_error(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "Australia", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "Australia", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -676,15 +853,33 @@ def test_feature_flags_v2_with_geoip_error(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "India", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "India", + "type": "person", + } + ], "rollout_percentage": None, } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -710,7 +905,9 @@ def test_feature_flags_v2_consistent_flags(self, *args): self.team.save() self.client.logout() person = Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, ) FeatureFlag.objects.create( team=self.team, @@ -733,9 +930,21 @@ def test_feature_flags_v2_consistent_flags(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": 
"first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -762,7 +971,11 @@ def test_feature_flags_v2_consistent_flags(self, *args): with self.assertNumQueries(13): response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -774,9 +987,17 @@ def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) Person.objects.create(team=self.team, distinct_ids=[1], properties={"email": "tim@posthog.com"}) - Person.objects.create(team=self.team, distinct_ids=[12345, "xyz"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=[12345, "xyz"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( team=self.team, rollout_percentage=30, @@ -802,7 +1023,11 @@ def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args with self.assertNumQueries(13): response = self._post_decide( api_version=2, 
- data={"token": self.team.api_token, "distinct_id": 12345, "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": 12345, + "$anon_distinct_id": "example_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -810,7 +1035,11 @@ def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args with self.assertNumQueries(9): response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": "xyz", "$anon_distinct_id": 12345}, + data={ + "token": self.team.api_token, + "distinct_id": "xyz", + "$anon_distinct_id": 12345, + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -818,7 +1047,11 @@ def test_feature_flags_v3_consistent_flags_with_numeric_distinct_ids(self, *args with self.assertNumQueries(9): response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": 5, "$anon_distinct_id": 12345}, + data={ + "token": self.team.api_token, + "distinct_id": 5, + "$anon_distinct_id": 12345, + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -851,9 +1084,21 @@ def test_feature_flags_v2_consistent_flags_with_ingestion_delays(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + 
"rollout_percentage": 25, + }, ] }, }, @@ -878,7 +1123,11 @@ def test_feature_flags_v2_consistent_flags_with_ingestion_delays(self, *args): with self.assertNumQueries(12): response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -891,7 +1140,9 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): self.team.save() self.client.logout() person = Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, ) FeatureFlag.objects.create( team=self.team, @@ -914,9 +1165,21 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -939,14 +1202,20 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): # on identify, this will trigger a merge with person.id being deleted, and # `example_id` becoming a part of person2. 
person2 = Person.objects.create( - team=self.team, distinct_ids=["other_id"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["other_id"], + properties={"email": "tim@posthog.com"}, ) # caching flag definitions in the above mean fewer queries with self.assertNumQueries(13): response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -979,7 +1248,10 @@ def test_feature_flags_v2_consistent_flags_with_merged_persons(self, *args): # caching flag definitions in the above mean fewer queries with self.assertNumQueries(5): - response = self._post_decide(api_version=2, data={"token": self.team.api_token, "distinct_id": "other_id"}) + response = self._post_decide( + api_version=2, + data={"token": self.team.api_token, "distinct_id": "other_id"}, + ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) self.assertEqual( @@ -991,7 +1263,9 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se self.team.save() self.client.logout() person = Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, ) FeatureFlag.objects.create( team=self.team, @@ -1014,9 +1288,21 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", 
"rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1042,7 +1328,11 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se # one extra query to find person_id for $anon_distinct_id response = self._post_decide( api_version=2, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -1059,7 +1349,10 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se with self.assertNumQueries(4): # caching flag definitions in the above mean fewer queries - response = self._post_decide(api_version=2, data={"token": self.team.api_token, "distinct_id": "other_id"}) + response = self._post_decide( + api_version=2, + data={"token": self.team.api_token, "distinct_id": "other_id"}, + ) # self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) self.assertEqual("third-variant", response.json()["featureFlags"]["multivariate-flag"]) # variant changed @@ -1069,7 +1362,10 @@ def test_feature_flags_v2_consistent_flags_with_delayed_new_identified_person(se # caching flag definitions in the above mean fewer queries with self.assertNumQueries(5): - response = self._post_decide(api_version=2, data={"token": self.team.api_token, "distinct_id": "other_id"}) + response = self._post_decide( + api_version=2, + data={"token": self.team.api_token, 
"distinct_id": "other_id"}, + ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) self.assertEqual( @@ -1081,10 +1377,14 @@ def test_feature_flags_v2_complex(self, *args): self.team.save() self.client.logout() Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com", "realm": "cloud"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com", "realm": "cloud"}, ) Person.objects.create( - team=self.team, distinct_ids=["hosted_id"], properties={"email": "sam@posthog.com", "realm": "hosted"} + team=self.team, + distinct_ids=["hosted_id"], + properties={"email": "sam@posthog.com", "realm": "hosted"}, ) FeatureFlag.objects.create( team=self.team, @@ -1097,14 +1397,33 @@ def test_feature_flags_v2_complex(self, *args): team=self.team, filters={ "groups": [ - {"properties": [{"key": "realm", "type": "person", "value": "cloud"}], "rollout_percentage": 80} + { + "properties": [{"key": "realm", "type": "person", "value": "cloud"}], + "rollout_percentage": 80, + } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 25}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, - {"key": "fourth-variant", "name": "Fourth Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 25, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, + { + "key": "fourth-variant", + "name": "Fourth Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1144,7 +1463,11 @@ def test_feature_flags_v3(self, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, 
distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) # use a non-csrf client to make requests to add feature flags client = Client() @@ -1183,9 +1506,21 @@ def test_feature_flags_v3(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1224,7 +1559,11 @@ def test_feature_flags_v3_with_database_errors(self, mock_counter, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) # use a non-csrf client to make requests to add feature flags client = Client() @@ -1236,7 +1575,14 @@ def test_feature_flags_v3_with_database_errors(self, mock_counter, *args): "filters": { "groups": [ { - "properties": [{"key": "email", "value": "tim", "type": "person", "operator": "icontains"}], + "properties": [ + { + "key": "email", + "value": "tim", + "type": "person", + "operator": "icontains", + } + ], "rollout_percentage": 50, } ] @@ -1270,9 +1616,21 @@ def test_feature_flags_v3_with_database_errors(self, mock_counter, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", 
"rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1315,7 +1673,11 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) # use a non-csrf client to make requests to add feature flags client = Client() @@ -1327,7 +1689,14 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, "filters": { "groups": [ { - "properties": [{"key": "email", "value": "tim", "type": "person", "operator": "icontains"}], + "properties": [ + { + "key": "email", + "value": "tim", + "type": "person", + "operator": "icontains", + } + ], "rollout_percentage": 50, } ] @@ -1361,9 +1730,21 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] 
}, }, @@ -1379,11 +1760,17 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, # also adding team to cache response = self._post_decide( api_version=3, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) mock_counter.labels.assert_called_once_with( - team_id=str(self.team.pk), errors_computing=False, has_hash_key_override=True + team_id=str(self.team.pk), + errors_computing=False, + has_hash_key_override=True, ) mock_counter.labels.return_value.inc.assert_called_once() mock_error_counter.labels.assert_not_called() @@ -1398,12 +1785,15 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) self.assertEqual( - "first-variant", response.json()["featureFlags"]["multivariate-flag"] + "first-variant", + response.json()["featureFlags"]["multivariate-flag"], ) # assigned by distinct_id hash self.assertFalse(response.json()["errorsWhileComputingFlags"]) mock_counter.labels.assert_called_once_with( - team_id=str(self.team.pk), errors_computing=False, has_hash_key_override=False + team_id=str(self.team.pk), + errors_computing=False, + has_hash_key_override=False, ) mock_counter.labels.return_value.inc.assert_called_once() mock_error_counter.labels.assert_not_called() @@ -1420,7 +1810,9 @@ def test_feature_flags_v3_metric_counter(self, mock_error_counter, mock_counter, self.assertTrue(response.json()["errorsWhileComputingFlags"]) mock_counter.labels.assert_called_once_with( - team_id=str(self.team.pk), errors_computing=True, has_hash_key_override=False + team_id=str(self.team.pk), + errors_computing=True, + has_hash_key_override=False, ) mock_counter.labels.return_value.inc.assert_called_once() 
mock_error_counter.labels.assert_any_call(reason="healthcheck_failed") @@ -1434,7 +1826,11 @@ def test_feature_flags_v3_with_database_errors_and_no_flags(self, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) # adding team to cache self._post_decide(api_version=3) @@ -1526,7 +1922,9 @@ def test_feature_flags_v3_consistent_flags_with_database_errors(self, *args): self.team.save() self.client.logout() person = Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, ) FeatureFlag.objects.create( team=self.team, @@ -1549,9 +1947,21 @@ def test_feature_flags_v3_consistent_flags_with_database_errors(self, *args): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1586,7 +1996,11 @@ def test_feature_flags_v3_consistent_flags_with_database_errors(self, *args): with connection.execute_wrapper(QueryTimeoutWrapper()): response = self._post_decide( api_version=3, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": 
"example_id", + }, ) self.assertTrue("beta-feature" not in response.json()["featureFlags"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -1600,11 +2014,16 @@ def test_feature_flags_v2_with_groups(self, *args): self.client.logout() GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) Person.objects.create( - team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com", "realm": "cloud"} + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com", "realm": "cloud"}, ) FeatureFlag.objects.create( team=self.team, - filters={"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 100}]}, + filters={ + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 100}], + }, name="This is a group-based flag", key="groups-flag", created_by=self.user, @@ -1624,10 +2043,19 @@ def test_feature_flags_with_personal_api_key(self, *args): PersonalAPIKey.objects.create(label="X", user=self.user, secure_value=hash_key_value(key_value)) Person.objects.create(team=self.team, distinct_ids=["example_id"]) FeatureFlag.objects.create( - team=self.team, rollout_percentage=100, name="Test", key="test", created_by=self.user + team=self.team, + rollout_percentage=100, + name="Test", + key="test", + created_by=self.user, ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=100, name="Disabled", key="disabled", created_by=self.user, active=False + team=self.team, + rollout_percentage=100, + name="Disabled", + key="disabled", + created_by=self.user, + active=False, ) # disabled flag FeatureFlag.objects.create( team=self.team, @@ -1636,7 +2064,11 @@ def test_feature_flags_with_personal_api_key(self, *args): created_by=self.user, ) # enabled for everyone response = self._post_decide( - {"distinct_id": "example_id", "api_key": key_value, "project_id": self.team.id} + { + "distinct_id": "example_id", + "api_key": key_value, + "project_id": 
self.team.id, + } ).json() self.assertEqual(response["featureFlags"], ["test", "default-flag"]) @@ -1646,10 +2078,24 @@ def test_flag_with_regular_cohorts(self, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id_1"], properties={"$some_prop_1": "something_1"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id_1"], + properties={"$some_prop_1": "something_1"}, + ) cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "$some_prop_1", "value": "something_1", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "$some_prop_1", + "value": "something_1", + "type": "person", + } + ] + } + ], name="cohort1", ) # no calculation for cohort @@ -1679,12 +2125,24 @@ def test_flag_with_behavioural_cohorts(self, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id_1"], properties={"$some_prop_1": "something_1"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id_1"], + properties={"$some_prop_1": "something_1"}, + ) cohort = Cohort.objects.create( team=self.team, groups=[ {"event_id": "$pageview", "days": 7}, - {"properties": [{"key": "$some_prop_1", "value": "something_1", "type": "person"}]}, + { + "properties": [ + { + "key": "$some_prop_1", + "value": "something_1", + "type": "person", + } + ] + }, ], name="cohort1", ) @@ -1728,20 +2186,31 @@ def test_personal_api_key_without_project_id(self, *args): def test_missing_token(self, *args): Person.objects.create(team=self.team, distinct_ids=["example_id"]) FeatureFlag.objects.create( - team=self.team, rollout_percentage=100, name="Test", key="test", created_by=self.user + team=self.team, + rollout_percentage=100, + name="Test", + key="test", + created_by=self.user, ) response = self._post_decide({"distinct_id": "example_id", "api_key": None, "project_id": self.team.id}) self.assertEqual(response.status_code, 
status.HTTP_401_UNAUTHORIZED) def test_invalid_payload_on_decide_endpoint(self, *args): - invalid_payloads = [base64.b64encode(b"1-1").decode("utf-8"), "1==1", "{distinct_id-1}"] + invalid_payloads = [ + base64.b64encode(b"1-1").decode("utf-8"), + "1==1", + "{distinct_id-1}", + ] for payload in invalid_payloads: response = self.client.post("/decide/", {"data": payload}, HTTP_ORIGIN="http://127.0.0.1:8000") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) response_data = response.json() detail = response_data.pop("detail") - self.assertEqual(response.json(), {"type": "validation_error", "code": "malformed_data", "attr": None}) + self.assertEqual( + response.json(), + {"type": "validation_error", "code": "malformed_data", "attr": None}, + ) self.assertIn("Malformed request data:", detail) def test_invalid_gzip_payload_on_decide_endpoint(self, *args): @@ -1754,7 +2223,10 @@ def test_invalid_gzip_payload_on_decide_endpoint(self, *args): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) response_data = response.json() detail = response_data.pop("detail") - self.assertEqual(response.json(), {"type": "validation_error", "code": "malformed_data", "attr": None}) + self.assertEqual( + response.json(), + {"type": "validation_error", "code": "malformed_data", "attr": None}, + ) self.assertIn("Malformed request data:", detail) def test_geoip_disable(self, *args): @@ -1762,7 +2234,11 @@ def test_geoip_disable(self, *args): self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$geoip_country_name": "India"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"$geoip_country_name": "India"}, + ) australia_ip = "13.106.122.3" @@ -1775,7 +2251,13 @@ def test_geoip_disable(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "Australia", "type": "person"}], + "properties": [ + { + "key": 
"$geoip_country_name", + "value": "Australia", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -1791,7 +2273,13 @@ def test_geoip_disable(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "India", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "India", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -1804,11 +2292,13 @@ def test_geoip_disable(self, *args): # person has geoip_country_name set to India, but australia-feature is true, because geoip resolution of current IP is enabled self.assertEqual( - geoip_not_disabled_res.json()["featureFlags"], {"australia-feature": True, "india-feature": False} + geoip_not_disabled_res.json()["featureFlags"], + {"australia-feature": True, "india-feature": False}, ) # person has geoip_country_name set to India, and australia-feature is false, because geoip resolution of current IP is disabled self.assertEqual( - geoip_disabled_res.json()["featureFlags"], {"australia-feature": False, "india-feature": True} + geoip_disabled_res.json()["featureFlags"], + {"australia-feature": False, "india-feature": True}, ) # test for falsy/truthy values @@ -1817,17 +2307,25 @@ def test_geoip_disable(self, *args): # person has geoip_country_name set to India, but australia-feature is true, because geoip resolution of current IP is enabled self.assertEqual( - geoip_not_disabled_res.json()["featureFlags"], {"australia-feature": True, "india-feature": False} + geoip_not_disabled_res.json()["featureFlags"], + {"australia-feature": True, "india-feature": False}, ) # person has geoip_country_name set to India, and australia-feature is false, because geoip resolution of current IP is disabled - self.assertEqual(geoip_disabled_res.json()["featureFlags"], {"australia-feature": False, "india-feature": True}) + self.assertEqual( + geoip_disabled_res.json()["featureFlags"], + {"australia-feature": False, "india-feature": True}, + ) def 
test_disable_flags(self, *args): self.team.app_urls = ["https://example.com"] self.team.save() self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$geoip_country_name": "India"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"$geoip_country_name": "India"}, + ) australia_ip = "13.106.122.3" @@ -1840,7 +2338,13 @@ def test_disable_flags(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "Australia", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "Australia", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -1856,7 +2360,13 @@ def test_disable_flags(self, *args): filters={ "groups": [ { - "properties": [{"key": "$geoip_country_name", "value": "India", "type": "person"}], + "properties": [ + { + "key": "$geoip_country_name", + "value": "India", + "type": "person", + } + ], "rollout_percentage": 100, } ] @@ -1873,7 +2383,8 @@ def test_disable_flags(self, *args): # person has geoip_country_name set to India, but australia-feature is true, because geoip resolution of current IP is enabled self.assertEqual( - flags_not_disabled_res.json()["featureFlags"], {"australia-feature": True, "india-feature": False} + flags_not_disabled_res.json()["featureFlags"], + {"australia-feature": True, "india-feature": False}, ) # person has geoip_country_name set to India, and australia-feature is false, because geoip resolution of current IP is disabled self.assertEqual(flags_disabled_res.json()["featureFlags"], {}) @@ -1908,7 +2419,10 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): self.assertEqual(response["siteApps"], []) self.assertEqual(response["capturePerformance"], True) self.assertEqual(response["featureFlags"], {}) - self.assertEqual(response["autocaptureExceptions"], {"errors_to_ignore": [], "endpoint": "/e/"}) + self.assertEqual( + response["autocaptureExceptions"], + 
{"errors_to_ignore": [], "endpoint": "/e/"}, + ) # now database is down with connection.execute_wrapper(QueryTimeoutWrapper()): @@ -1928,7 +2442,10 @@ def test_decide_doesnt_error_out_when_database_is_down(self, *args): self.assertEqual(response["supportedCompression"], ["gzip", "gzip-js"]) self.assertEqual(response["siteApps"], []) self.assertEqual(response["capturePerformance"], True) - self.assertEqual(response["autocaptureExceptions"], {"errors_to_ignore": [], "endpoint": "/e/"}) + self.assertEqual( + response["autocaptureExceptions"], + {"errors_to_ignore": [], "endpoint": "/e/"}, + ) self.assertEqual(response["featureFlags"], {}) def test_decide_with_json_and_numeric_distinct_ids(self, *args): @@ -1977,7 +2494,10 @@ def test_decide_with_json_and_numeric_distinct_ids(self, *args): "updated_at": "2023-04-21T08:43:34.479", }, ) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True, "filer-by-property": True}) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": True}, + ) with self.assertNumQueries(4): response = self._post_decide( @@ -1988,7 +2508,10 @@ def test_decide_with_json_and_numeric_distinct_ids(self, *args): with self.assertNumQueries(4): response = self._post_decide(api_version=2, distinct_id={"x": "y"}) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True, "filer-by-property": True}) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": True}, + ) with self.assertNumQueries(4): response = self._post_decide(api_version=2, distinct_id={"x": "z"}) @@ -1996,11 +2519,23 @@ def test_decide_with_json_and_numeric_distinct_ids(self, *args): # need to pass in exact string to get the property flag def test_rate_limits(self, *args): - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=0.1, DECIDE_BUCKET_CAPACITY=3): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=0.1, 
+ DECIDE_BUCKET_CAPACITY=3, + ): self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) FeatureFlag.objects.create( team=self.team, @@ -2027,11 +2562,23 @@ def test_rate_limits(self, *args): ) def test_rate_limits_replenish_over_time(self, *args): - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=1, DECIDE_BUCKET_CAPACITY=1): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=1, + DECIDE_BUCKET_CAPACITY=1, + ): self.client.logout() - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) FeatureFlag.objects.create( team=self.team, @@ -2058,7 +2605,11 @@ def test_rate_limits_replenish_over_time(self, *args): def test_rate_limits_work_with_invalid_tokens(self, *args): self.client.logout() - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=0.01, DECIDE_BUCKET_CAPACITY=3): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=0.01, + DECIDE_BUCKET_CAPACITY=3, + ): for _ in range(3): response = self._post_decide(api_version=3, data={"token": "aloha?", "distinct_id": "123"}) 
self.assertEqual(response.status_code, 401) @@ -2077,7 +2628,11 @@ def test_rate_limits_work_with_invalid_tokens(self, *args): def test_rate_limits_work_with_missing_tokens(self, *args): self.client.logout() - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=0.1, DECIDE_BUCKET_CAPACITY=3): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=0.1, + DECIDE_BUCKET_CAPACITY=3, + ): for _ in range(3): response = self._post_decide(api_version=3, data={"distinct_id": "123"}) self.assertEqual(response.status_code, 401) @@ -2096,7 +2651,11 @@ def test_rate_limits_work_with_missing_tokens(self, *args): def test_rate_limits_work_with_malformed_request(self, *args): self.client.logout() - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=0.1, DECIDE_BUCKET_CAPACITY=4): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=0.1, + DECIDE_BUCKET_CAPACITY=4, + ): def invalid_request(): return self.client.post("/decide/", {"data": "1==1"}, HTTP_ORIGIN="http://127.0.0.1:8000") @@ -2134,11 +2693,20 @@ def test_rate_limits_dont_mix_teams(self, *args): organization=self.organization, api_token=new_token, test_account_filters=[ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ], ) self.client.logout() - with self.settings(DECIDE_RATE_LIMIT_ENABLED="y", DECIDE_BUCKET_REPLENISH_RATE=0.1, DECIDE_BUCKET_CAPACITY=3): + with self.settings( + DECIDE_RATE_LIMIT_ENABLED="y", + DECIDE_BUCKET_REPLENISH_RATE=0.1, + DECIDE_BUCKET_CAPACITY=3, + ): for _ in range(3): response = self._post_decide(api_version=3) self.assertEqual(response.status_code, 200) @@ -2157,7 +2725,11 @@ def test_rate_limits_dont_mix_teams(self, *args): @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def 
test_decide_analytics_only_fires_when_enabled(self, *args): FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) self.client.logout() with self.settings(DECIDE_BILLING_SAMPLING_RATE=0): @@ -2174,13 +2746,20 @@ def test_decide_analytics_only_fires_when_enabled(self, *args): client = redis.get_client() # check that single increment made it to redis - self.assertEqual(client.hgetall(f"posthog:decide_requests:{self.team.pk}"), {b"165192618": b"1"}) + self.assertEqual( + client.hgetall(f"posthog:decide_requests:{self.team.pk}"), + {b"165192618": b"1"}, + ) @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_decide_analytics_samples_appropriately(self, *args): random.seed(12345) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) self.client.logout() with self.settings(DECIDE_BILLING_SAMPLING_RATE=0.5), freeze_time("2022-05-07 12:23:07"): @@ -2191,13 +2770,20 @@ def test_decide_analytics_samples_appropriately(self, *args): client = redis.get_client() # check that no increments made it to redis - self.assertEqual(client.hgetall(f"posthog:decide_requests:{self.team.pk}"), {b"165192618": b"8"}) + self.assertEqual( + client.hgetall(f"posthog:decide_requests:{self.team.pk}"), + {b"165192618": b"8"}, + ) @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_decide_analytics_samples_appropriately_with_small_sample_rate(self, *args): random.seed(12345) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta 
feature", + key="beta-feature", + created_by=self.user, ) self.client.logout() with self.settings(DECIDE_BILLING_SAMPLING_RATE=0.02), freeze_time("2022-05-07 12:23:07"): @@ -2208,13 +2794,20 @@ def test_decide_analytics_samples_appropriately_with_small_sample_rate(self, *ar client = redis.get_client() # check that no increments made it to redis - self.assertEqual(client.hgetall(f"posthog:decide_requests:{self.team.pk}"), {b"165192618": b"50"}) + self.assertEqual( + client.hgetall(f"posthog:decide_requests:{self.team.pk}"), + {b"165192618": b"50"}, + ) @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_decide_analytics_samples_dont_break_with_zero_sampling(self, *args): random.seed(12345) FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) self.client.logout() with self.settings(DECIDE_BILLING_SAMPLING_RATE=0), freeze_time("2022-05-07 12:23:07"): @@ -2230,7 +2823,11 @@ def test_decide_analytics_samples_dont_break_with_zero_sampling(self, *args): @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_decide_analytics_fires_with_survey_linked_and_targeting_flags(self, *args): ff = FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="Beta feature", key="beta-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="Beta feature", + key="beta-feature", + created_by=self.user, ) # use a non-csrf client to make requests req_client = Client() @@ -2240,7 +2837,12 @@ def test_decide_analytics_fires_with_survey_linked_and_targeting_flags(self, *ar data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want 
to improve from notebooks?", + } + ], "linked_flag_id": ff.id, "targeting_flag_filters": { "groups": [ @@ -2248,7 +2850,12 @@ def test_decide_analytics_fires_with_survey_linked_and_targeting_flags(self, *ar "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -2270,7 +2877,10 @@ def test_decide_analytics_fires_with_survey_linked_and_targeting_flags(self, *ar client = redis.get_client() # check that single increment made it to redis - self.assertEqual(client.hgetall(f"posthog:decide_requests:{self.team.pk}"), {b"165192618": b"1"}) + self.assertEqual( + client.hgetall(f"posthog:decide_requests:{self.team.pk}"), + {b"165192618": b"1"}, + ) @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_decide_analytics_fire_for_survey_targeting_flags(self, *args): @@ -2289,14 +2899,24 @@ def test_decide_analytics_fire_for_survey_targeting_flags(self, *args): data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "targeting_flag_filters": { "groups": [ { "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -2318,7 +2938,10 @@ def test_decide_analytics_fire_for_survey_targeting_flags(self, *args): client = redis.get_client() # check that single increment made it to redis - self.assertEqual(client.hgetall(f"posthog:decide_requests:{self.team.pk}"), {b"165192618": b"1"}) + self.assertEqual( + 
client.hgetall(f"posthog:decide_requests:{self.team.pk}"), + {b"165192618": b"1"}, + ) class TestDatabaseCheckForDecide(BaseTest, QueryMatchingTest): @@ -2409,7 +3032,10 @@ def test_database_check_doesnt_interfere_with_regular_computation(self, *args): # one extra query to select team because not in cache with self.assertNumQueries(6): response = self._post_decide(api_version=3, distinct_id=12345) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True, "filer-by-property": False}) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": False}, + ) with self.assertNumQueries(4): response = self._post_decide( @@ -2422,7 +3048,10 @@ def test_database_check_doesnt_interfere_with_regular_computation(self, *args): "updated_at": "2023-04-21T08:43:34.479", }, ) - self.assertEqual(response.json()["featureFlags"], {"random-flag": True, "filer-by-property": True}) + self.assertEqual( + response.json()["featureFlags"], + {"random-flag": True, "filer-by-property": True}, + ) def test_decide_doesnt_error_out_when_database_is_down_and_database_check_isnt_cached(self, *args): ALL_TEAM_PARAMS_FOR_DECIDE = { @@ -2482,7 +3111,8 @@ def test_decide_doesnt_error_out_when_database_is_down_and_database_check_isnt_c @pytest.mark.skipif( - "decide" not in settings.READ_REPLICA_OPT_IN, reason="This test requires READ_REPLICA_OPT_IN=decide" + "decide" not in settings.READ_REPLICA_OPT_IN, + reason="This test requires READ_REPLICA_OPT_IN=decide", ) class TestDecideUsesReadReplica(TransactionTestCase): """ @@ -2519,10 +3149,14 @@ def setup_user_and_team_in_db(self, dbname: str = "default"): ) team = Team.objects.using(dbname).create(organization=organization, name="Team 1 org 1") user = User.objects.using(dbname).create( - email=f"test-{random.randint(1, 100000)}@posthog.com", password="password", first_name="first_name" + email=f"test-{random.randint(1, 100000)}@posthog.com", + password="password", + first_name="first_name", ) 
OrganizationMembership.objects.using(dbname).create( - user=user, organization=organization, level=OrganizationMembership.Level.OWNER + user=user, + organization=organization, + level=OrganizationMembership.Level.OWNER, ) return organization, team, user @@ -2608,7 +3242,10 @@ def test_healthcheck_uses_read_replica(self): self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual({}, response.json()["featureFlags"]) - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_decide_uses_read_replica(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("default") self.organization, self.team, self.user = org, team, user @@ -2630,7 +3267,12 @@ def test_decide_uses_read_replica(self, mock_is_connected): "groups": [ { "properties": [ - {"key": "email", "value": "posthog", "operator": "icontains", "type": "person"} + { + "key": "email", + "value": "posthog", + "operator": "icontains", + "type": "person", + } ], "rollout_percentage": None, } @@ -2674,7 +3316,10 @@ def test_decide_uses_read_replica(self, mock_is_connected): }, ) - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("default") self.organization, self.team, self.user = org, team, user @@ -2688,8 +3333,16 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte { "type": "OR", "values": [ - {"key": "email", "value": "tim@posthog.com", "type": "person"}, - {"key": "email", "value": "tim3@posthog.com", "type": "person"}, + { + "key": "email", + "value": "tim@posthog.com", + "type": 
"person", + }, + { + "key": "email", + "value": "tim3@posthog.com", + "type": "person", + }, ], } ], @@ -2705,13 +3358,34 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte ) persons = [ - {"distinct_ids": ["example_id"], "properties": {"email": "tim@posthog.com"}}, - {"distinct_ids": ["cohort_founder"], "properties": {"email": "tim2@posthog.com"}}, - {"distinct_ids": ["cohort_secondary"], "properties": {"email": "tim3@posthog.com"}}, + { + "distinct_ids": ["example_id"], + "properties": {"email": "tim@posthog.com"}, + }, + { + "distinct_ids": ["cohort_founder"], + "properties": {"email": "tim2@posthog.com"}, + }, + { + "distinct_ids": ["cohort_secondary"], + "properties": {"email": "tim3@posthog.com"}, + }, ] flags = [ { - "filters": {"groups": [{"properties": [{"key": "id", "value": cohort_static.pk, "type": "cohort"}]}]}, + "filters": { + "groups": [ + { + "properties": [ + { + "key": "id", + "value": cohort_static.pk, + "type": "cohort", + } + ] + } + ] + }, "name": "This is a feature flag with default params, no filters.", "key": "static-flag", }, @@ -2719,7 +3393,13 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte "filters": { "groups": [ { - "properties": [{"key": "id", "value": cohort_dynamic.pk, "type": "cohort"}], + "properties": [ + { + "key": "id", + "value": cohort_dynamic.pk, + "type": "cohort", + } + ], "rollout_percentage": None, } ] @@ -2732,8 +3412,16 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte "groups": [ { "properties": [ - {"key": "id", "value": cohort_dynamic.pk, "type": "cohort"}, - {"key": "id", "value": cohort_static.pk, "type": "cohort"}, + { + "key": "id", + "value": cohort_dynamic.pk, + "type": "cohort", + }, + { + "key": "id", + "value": cohort_static.pk, + "type": "cohort", + }, ], "rollout_percentage": None, } @@ -2746,10 +3434,22 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte "filters": { 
"groups": [ { - "properties": [{"key": "id", "value": cohort_dynamic.pk, "type": "cohort"}], + "properties": [ + { + "key": "id", + "value": cohort_dynamic.pk, + "type": "cohort", + } + ], }, { - "properties": [{"key": "id", "value": cohort_static.pk, "type": "cohort"}], + "properties": [ + { + "key": "id", + "value": cohort_static.pk, + "type": "cohort", + } + ], }, ] }, @@ -2818,7 +3518,10 @@ def test_decide_uses_read_replica_for_cohorts_based_flags(self, mock_is_connecte }, ) - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_feature_flags_v3_consistent_flags(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("default") self.organization, self.team, self.user = org, team, user @@ -2841,7 +3544,12 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): "groups": [ { "properties": [ - {"key": "email", "value": "posthog", "operator": "icontains", "type": "person"} + { + "key": "email", + "value": "posthog", + "operator": "icontains", + "type": "person", + } ], "rollout_percentage": None, } @@ -2855,9 +3563,21 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -2893,10 +3613,16 @@ def 
test_feature_flags_v3_consistent_flags(self, mock_is_connected): PersonDistinctId.objects.using("default").create(person=person, distinct_id="other_id", team=self.team) # hash key override already exists FeatureFlagHashKeyOverride.objects.using("default").create( - team=self.team, person=person, hash_key="example_id", feature_flag_key="beta-feature" + team=self.team, + person=person, + hash_key="example_id", + feature_flag_key="beta-feature", ) FeatureFlagHashKeyOverride.objects.using("default").create( - team=self.team, person=person, hash_key="example_id", feature_flag_key="multivariate-flag" + team=self.team, + person=person, + hash_key="example_id", + feature_flag_key="multivariate-flag", ) # new request with hash key overrides but not writes should not go to main database @@ -2916,7 +3642,11 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): response = self._post_decide( api_version=3, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example22_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example22_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -2940,7 +3670,11 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): response = self._post_decide( api_version=3, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example22_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example22_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -2950,14 +3684,21 @@ def test_feature_flags_v3_consistent_flags(self, mock_is_connected): with connections["replica"].execute_wrapper(QueryTimeoutWrapper()): response = self._post_decide( api_version=3, - data={"token": 
self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example22_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example22_id", + }, ) self.assertTrue("beta-feature" not in response.json()["featureFlags"]) self.assertTrue("default-flag" not in response.json()["featureFlags"]) self.assertTrue(response.json()["featureFlags"]["default-no-prop-flag"]) self.assertTrue(response.json()["errorsWhileComputingFlags"]) - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_feature_flags_v3_consistent_flags_with_write_on_hash_key_overrides(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("default") self.organization, self.team, self.user = org, team, user @@ -2975,7 +3716,12 @@ def test_feature_flags_v3_consistent_flags_with_write_on_hash_key_overrides(self "groups": [ { "properties": [ - {"key": "email", "value": "posthog", "operator": "icontains", "type": "person"} + { + "key": "email", + "value": "posthog", + "operator": "icontains", + "type": "person", + } ], "rollout_percentage": None, } @@ -2989,9 +3735,21 @@ def test_feature_flags_v3_consistent_flags_with_write_on_hash_key_overrides(self "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3051,7 
+3809,11 @@ def test_feature_flags_v3_consistent_flags_with_write_on_hash_key_overrides(self response = self._post_decide( api_version=3, - data={"token": self.team.api_token, "distinct_id": "other_id", "$anon_distinct_id": "example_id"}, + data={ + "token": self.team.api_token, + "distinct_id": "other_id", + "$anon_distinct_id": "example_id", + }, ) self.assertTrue(response.json()["featureFlags"]["beta-feature"]) self.assertTrue(response.json()["featureFlags"]["default-flag"]) @@ -3060,7 +3822,10 @@ def test_feature_flags_v3_consistent_flags_with_write_on_hash_key_overrides(self "first-variant", response.json()["featureFlags"]["multivariate-flag"] ) # assigned by distinct_id hash - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_feature_flags_v2_with_groups(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("replica") self.organization, self.team, self.user = org, team, user @@ -3115,7 +3880,8 @@ def test_feature_flags_v2_with_groups(self, mock_is_connected): # E 2. SELECT "posthog_grouptypemapping"."id", -- a.k.a. get group type mappings response = self._post_decide(distinct_id="example_id") self.assertEqual( - response.json()["featureFlags"], {"default-no-prop-group-flag": False, "groups-flag": False} + response.json()["featureFlags"], + {"default-no-prop-group-flag": False, "groups-flag": False}, ) self.assertFalse(response.json()["errorsWhileComputingFlags"]) @@ -3126,9 +3892,13 @@ def test_feature_flags_v2_with_groups(self, mock_is_connected): # E 3. SET LOCAL statement_timeout = 600 # E 4. SELECT (UPPER(("posthog_group"."group_properties" ->> 'email')::text) AS "flag_182_condition_0" FROM "posthog_group" -- a.k.a get group0 conditions # E 5. 
SELECT (true) AS "flag_181_condition_0" FROM "posthog_group" WHERE ("posthog_group"."team_id" = 91 -- a.k.a get group1 conditions - response = self._post_decide(distinct_id="example_id", groups={"organization": "foo2", "project": "bar"}) + response = self._post_decide( + distinct_id="example_id", + groups={"organization": "foo2", "project": "bar"}, + ) self.assertEqual( - response.json()["featureFlags"], {"groups-flag": False, "default-no-prop-group-flag": True} + response.json()["featureFlags"], + {"groups-flag": False, "default-no-prop-group-flag": True}, ) self.assertFalse(response.json()["errorsWhileComputingFlags"]) @@ -3139,11 +3909,20 @@ def test_feature_flags_v2_with_groups(self, mock_is_connected): # E 6. SET LOCAL statement_timeout = 600 # E 7. SELECT (UPPER(("posthog_group"."group_properties" ->> 'email')::text) AS "flag_182_condition_0" FROM "posthog_group" -- a.k.a get group0 conditions # E 8. SELECT (true) AS "flag_181_condition_0" FROM "posthog_group" WHERE ("posthog_group"."team_id" = 91 -- a.k.a get group1 conditions - response = self._post_decide(distinct_id="example_id", groups={"organization": "foo", "project": "bar"}) - self.assertEqual(response.json()["featureFlags"], {"groups-flag": True, "default-no-prop-group-flag": True}) + response = self._post_decide( + distinct_id="example_id", + groups={"organization": "foo", "project": "bar"}, + ) + self.assertEqual( + response.json()["featureFlags"], + {"groups-flag": True, "default-no-prop-group-flag": True}, + ) self.assertFalse(response.json()["errorsWhileComputingFlags"]) - @patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) + @patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, + ) def test_site_apps_in_decide_use_replica(self, mock_is_connected): org, team, user = self.setup_user_and_team_in_db("default") self.organization, self.team, self.user = org, team, user @@ -3157,7 +3936,12 @@ 
def test_site_apps_in_decide_use_replica(self, mock_is_connected): status=PluginSourceFile.Status.TRANSPILED, ) PluginConfig.objects.create( - plugin=plugin, enabled=True, order=1, team=self.team, config={}, web_token="tokentoken" + plugin=plugin, + enabled=True, + order=1, + team=self.team, + config={}, + web_token="tokentoken", ) sync_team_inject_web_apps(self.team) @@ -3194,9 +3978,21 @@ def test_local_evaluation(self, mock_rate_limit, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3209,7 +4005,10 @@ def test_local_evaluation(self, mock_rate_limit, mock_capture): { "name": "Group feature", "key": "group-feature", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 21}]}, + "filters": { + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 21}], + }, }, format="json", ) @@ -3281,9 +4080,21 @@ def test_local_evaluation(self, mock_rate_limit, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": 
"third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3299,7 +4110,10 @@ def test_local_evaluation(self, mock_rate_limit, mock_capture): "key": "beta-feature", "filters": { "groups": [ - {"properties": [{"key": "beta-property", "value": "beta-value"}], "rollout_percentage": 51} + { + "properties": [{"key": "beta-property", "value": "beta-value"}], + "rollout_percentage": 51, + } ] }, "deleted": False, @@ -3312,7 +4126,10 @@ def test_local_evaluation(self, mock_rate_limit, mock_capture): { "name": "Group feature", "key": "group-feature", - "filters": {"groups": [{"rollout_percentage": 21}], "aggregation_group_type_index": 0}, + "filters": { + "groups": [{"rollout_percentage": 21}], + "aggregation_group_type_index": 0, + }, "deleted": False, "active": True, "ensure_experience_continuity": False, @@ -3342,8 +4159,16 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, ], } ], @@ -3361,7 +4186,11 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, ], } ], @@ -3379,7 +4208,11 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, ], } ], @@ -3397,14 +4230,32 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": 
[{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3420,7 +4271,13 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": other_cohort1.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": other_cohort1.pk, + } + ], } ], }, @@ -3466,15 +4323,33 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): "filters": { "groups": [ { - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], "rollout_percentage": 20, }, ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3492,7 +4367,13 @@ def 
test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): "filters": { "groups": [ { - "properties": [{"key": "id", "type": "cohort", "value": other_cohort1.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": other_cohort1.pk, + } + ], "rollout_percentage": 20, }, ], @@ -3514,8 +4395,16 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "type": "person", "value": "nomatchihope"}, - {"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, + { + "key": "$some_prop2", + "type": "person", + "value": "nomatchihope2", + }, ], } ], @@ -3526,7 +4415,11 @@ def test_local_evaluation_for_cohorts(self, mock_rate_limit, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "type": "person", "value": "nomatchihope"}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, ], } ], @@ -3551,8 +4444,16 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, ], } ], @@ -3570,9 +4471,22 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, - {"key": "id", "value": cohort_valid_for_ff.pk, "type": "cohort", "negation": True}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, + { + "key": "id", + 
"value": cohort_valid_for_ff.pk, + "type": "cohort", + "negation": True, + }, ], } ], @@ -3597,9 +4511,21 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3616,7 +4542,13 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], }, @@ -3663,8 +4595,16 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt { "type": "OR", "values": [ - {"key": "$some_prop", "type": "person", "value": "nomatchihope"}, - {"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, + { + "key": "$some_prop2", + "type": "person", + "value": "nomatchihope2", + }, ], } ], @@ -3675,9 +4615,22 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt { "type": "OR", "values": [ - {"key": "$some_prop", "type": "person", "value": "nomatchihope"}, - {"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}, - {"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk, "negation": True}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, + { + "key": "$some_prop2", + "type": 
"person", + "value": "nomatchihope2", + }, + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + "negation": True, + }, ], } ], @@ -3698,9 +4651,21 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -3718,7 +4683,13 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_rate_limit, mock_capt "filters": { "groups": [ { - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], "rollout_percentage": 20, }, ], diff --git a/posthog/api/test/test_early_access_feature.py b/posthog/api/test/test_early_access_feature.py index 303814a07c505..4dc45bcafe2cc 100644 --- a/posthog/api/test/test_early_access_feature.py +++ b/posthog/api/test/test_early_access_feature.py @@ -6,7 +6,12 @@ from posthog.models.early_access_feature import EarlyAccessFeature from posthog.models import FeatureFlag, Person -from posthog.test.base import APIBaseTest, BaseTest, QueryMatchingTest, snapshot_postgres_queries +from posthog.test.base import ( + APIBaseTest, + BaseTest, + QueryMatchingTest, + snapshot_postgres_queries, +) class TestEarlyAccessFeature(APIBaseTest): @@ -128,12 +133,14 @@ def test_update_doesnt_remove_super_condition(self): assert len(response_data["feature_flag"]["filters"]["super_groups"]) == 1 def 
test_we_dont_delete_existing_flag_information_when_creating_early_access_feature(self): - flag = FeatureFlag.objects.create( team=self.team, filters={ "groups": [ - {"properties": [{"key": "xyz", "value": "ok", "type": "person"}], "rollout_percentage": None} + { + "properties": [{"key": "xyz", "value": "ok", "type": "person"}], + "rollout_percentage": None, + } ], "payloads": {"true": "Hick bondoogling? ????"}, }, @@ -162,7 +169,10 @@ def test_we_dont_delete_existing_flag_information_when_creating_early_access_fea flag.filters, { "groups": [ - {"properties": [{"key": "xyz", "value": "ok", "type": "person"}], "rollout_percentage": None} + { + "properties": [{"key": "xyz", "value": "ok", "type": "person"}], + "rollout_percentage": None, + } ], "payloads": {"true": "Hick bondoogling? ????"}, "super_groups": [ @@ -182,7 +192,6 @@ def test_we_dont_delete_existing_flag_information_when_creating_early_access_fea ) def test_cant_create_early_access_feature_with_duplicate_key(self): - FeatureFlag.objects.create( team=self.team, filters={"groups": [{"properties": [], "rollout_percentage": None}]}, @@ -209,7 +218,6 @@ def test_cant_create_early_access_feature_with_duplicate_key(self): ) def test_can_create_new_early_access_feature_with_soft_deleted_flag(self): - FeatureFlag.objects.create( team=self.team, filters={"groups": [{"properties": [], "rollout_percentage": None}]}, @@ -245,12 +253,14 @@ def test_can_create_new_early_access_feature_with_soft_deleted_flag(self): assert isinstance(response_data["created_at"], str) def test_deleting_early_access_feature_removes_super_condition_from_flag(self): - existing_flag = FeatureFlag.objects.create( team=self.team, filters={ "groups": [ - {"properties": [{"key": "xyz", "value": "ok", "type": "person"}], "rollout_percentage": None} + { + "properties": [{"key": "xyz", "value": "ok", "type": "person"}], + "rollout_percentage": None, + } ] }, key="hick-bondoogling", @@ -283,19 +293,24 @@ def 
test_deleting_early_access_feature_removes_super_condition_from_flag(self): flag.filters, { "groups": [ - {"properties": [{"key": "xyz", "value": "ok", "type": "person"}], "rollout_percentage": None} + { + "properties": [{"key": "xyz", "value": "ok", "type": "person"}], + "rollout_percentage": None, + } ], "super_groups": None, }, ) def test_cant_soft_delete_flag_with_early_access_feature(self): - existing_flag = FeatureFlag.objects.create( team=self.team, filters={ "groups": [ - {"properties": [{"key": "xyz", "value": "ok", "type": "person"}], "rollout_percentage": None} + { + "properties": [{"key": "xyz", "value": "ok", "type": "person"}], + "rollout_percentage": None, + } ] }, key="hick-bondoogling", @@ -333,7 +348,6 @@ def test_cant_soft_delete_flag_with_early_access_feature(self): ) def test_cant_create_early_access_feature_with_group_flag(self): - flag = FeatureFlag.objects.create( team=self.team, filters={ @@ -364,16 +378,27 @@ def test_cant_create_early_access_feature_with_group_flag(self): ) def test_cant_create_early_access_feature_with_multivariate_flag(self): - flag = FeatureFlag.objects.create( team=self.team, filters={ "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -401,7 +426,6 @@ def test_cant_create_early_access_feature_with_multivariate_flag(self): ) def test_cant_create_early_access_feature_with_flag_with_existing_early_access_feature(self): - flag = FeatureFlag.objects.create( 
team=self.team, filters={ @@ -520,7 +544,11 @@ def _get_features( @snapshot_postgres_queries def test_early_access_features(self): - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "example@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "example@posthog.com"}, + ) feature_flag = FeatureFlag.objects.create( team=self.team, @@ -573,7 +601,11 @@ def test_early_access_features(self): ) def test_early_access_features_beta_only(self): - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "example@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "example@posthog.com"}, + ) feature_flag = FeatureFlag.objects.create( team=self.team, diff --git a/posthog/api/test/test_element.py b/posthog/api/test/test_element.py index 81b28c3198be7..b7cf8f944a7e9 100644 --- a/posthog/api/test/test_element.py +++ b/posthog/api/test/test_element.py @@ -131,7 +131,12 @@ def test_event_property_values(self) -> None: elements=[Element(tag_name="a", href="https://posthog.com/about", text="click here")], ) team2 = Organization.objects.bootstrap(None)[2] - _create_event(team=team2, distinct_id="test", event="$autocapture", elements=[Element(tag_name="bla")]) + _create_event( + team=team2, + distinct_id="test", + event="$autocapture", + elements=[Element(tag_name="bla")], + ) response = self.client.get("/api/element/values/?key=tag_name").json() self.assertEqual(response[0]["name"], "a") @@ -163,7 +168,10 @@ def test_element_stats_can_filter_by_hogql(self) -> None: self._setup_events() properties_filter = json.dumps( [ - {"type": "hogql", "key": "like(properties.$current_url, '%another_page%')"}, + { + "type": "hogql", + "key": "like(properties.$current_url, '%another_page%')", + }, ] ) response = self.client.get(f"/api/element/stats/?paginate_response=true&properties={properties_filter}").json() @@ 
-183,8 +191,18 @@ def test_element_stats_clamps_date_from_to_start_of_day(self) -> None: with freeze_time(event_start) as frozen_time: elements = [ - Element(tag_name="a", href="https://posthog.com/about", text="click here", order=0), - Element(tag_name="div", href="https://posthog.com/about", text="click here", order=1), + Element( + tag_name="a", + href="https://posthog.com/about", + text="click here", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/about", + text="click here", + order=1, + ), ] _create_event( # 3 am but included because date_from is set to start of day @@ -288,12 +306,26 @@ def test_element_stats_does_not_allow_unexepcted_include(self) -> None: def _setup_events(self): _create_person(distinct_ids=["one"], team=self.team, properties={"email": "one@mail.com"}) _create_person(distinct_ids=["two"], team=self.team, properties={"email": "two@mail.com"}) - _create_person(distinct_ids=["three"], team=self.team, properties={"email": "three@mail.com"}) + _create_person( + distinct_ids=["three"], + team=self.team, + properties={"email": "three@mail.com"}, + ) _create_event( team=self.team, elements=[ - Element(tag_name="a", href="https://posthog.com/event-1", text="event 1", order=0), - Element(tag_name="div", href="https://posthog.com/event-1", text="event 1", order=1), + Element( + tag_name="a", + href="https://posthog.com/event-1", + text="event 1", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-1", + text="event 1", + order=1, + ), ], event="$autocapture", distinct_id="one", @@ -302,8 +334,18 @@ def _setup_events(self): _create_event( team=self.team, elements=[ - Element(tag_name="a", href="https://posthog.com/event-1", text="event 1", order=0), - Element(tag_name="div", href="https://posthog.com/event-1", text="event 1", order=1), + Element( + tag_name="a", + href="https://posthog.com/event-1", + text="event 1", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-1", + 
text="event 1", + order=1, + ), ], event="$autocapture", distinct_id="one", @@ -312,8 +354,18 @@ def _setup_events(self): _create_event( team=self.team, elements=[ - Element(tag_name="a", href="https://posthog.com/event-2", text="event 2", order=0), - Element(tag_name="div", href="https://posthog.com/event-2", text="event 2", order=1), + Element( + tag_name="a", + href="https://posthog.com/event-2", + text="event 2", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-2", + text="event 2", + order=1, + ), ], event="$autocapture", distinct_id="two", @@ -322,8 +374,18 @@ def _setup_events(self): _create_event( team=self.team, elements=[ - Element(tag_name="a", href="https://posthog.com/event-2", text="event 2", order=0), - Element(tag_name="div", href="https://posthog.com/event-2", text="event 2", order=1), + Element( + tag_name="a", + href="https://posthog.com/event-2", + text="event 2", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-2", + text="event 2", + order=1, + ), ], event="$autocapture", distinct_id="three", @@ -332,8 +394,18 @@ def _setup_events(self): _create_event( team=self.team, elements=[ - Element(tag_name="a", href="https://posthog.com/event-1", text="event 1", order=0), - Element(tag_name="div", href="https://posthog.com/event-1", text="event 1", order=1), + Element( + tag_name="a", + href="https://posthog.com/event-1", + text="event 1", + order=0, + ), + Element( + tag_name="div", + href="https://posthog.com/event-1", + text="event 1", + order=1, + ), ], event="$rageclick", distinct_id="one", diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py index b7f746c84a473..dfd24b6589278 100644 --- a/posthog/api/test/test_event.py +++ b/posthog/api/test/test_event.py @@ -41,16 +41,33 @@ def test_filter_events(self): team=self.team, distinct_id="2", properties={"$ip": "8.8.8.8"}, - elements=[Element(tag_name="button", text="something"), Element(tag_name="div")], + elements=[ + 
Element(tag_name="button", text="something"), + Element(tag_name="div"), + ], + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some-random-uid", + properties={"$ip": "8.8.8.8"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some-other-one", + properties={"$ip": "8.8.8.8"}, ) - _create_event(event="$pageview", team=self.team, distinct_id="some-random-uid", properties={"$ip": "8.8.8.8"}) - _create_event(event="$pageview", team=self.team, distinct_id="some-other-one", properties={"$ip": "8.8.8.8"}) flush_persons_and_events() response = self.client.get(f"/api/projects/{self.team.id}/events/?distinct_id=2").json() self.assertEqual( response["results"][0]["person"], - {"distinct_ids": ["2"], "is_identified": True, "properties": {"email": "tim@posthog.com"}}, + { + "distinct_ids": ["2"], + "is_identified": True, + "properties": {"email": "tim@posthog.com"}, + }, ) self.assertEqual(response["results"][0]["elements"][0]["tag_name"], "button") self.assertEqual(response["results"][0]["elements"][0]["order"], 0) @@ -58,9 +75,23 @@ def test_filter_events(self): @override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=False) def test_filter_events_by_event_name(self): - _create_person(properties={"email": "tim@posthog.com"}, team=self.team, distinct_ids=["2", "some-random-uid"]) - _create_event(event="event_name", team=self.team, distinct_id="2", properties={"$ip": "8.8.8.8"}) - _create_event(event="another event", team=self.team, distinct_id="2", properties={"$ip": "8.8.8.8"}) + _create_person( + properties={"email": "tim@posthog.com"}, + team=self.team, + distinct_ids=["2", "some-random-uid"], + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="2", + properties={"$ip": "8.8.8.8"}, + ) + _create_event( + event="another event", + team=self.team, + distinct_id="2", + properties={"$ip": "8.8.8.8"}, + ) flush_persons_and_events() # Django session, PostHog user, PostHog team, PostHog org membership, @@ -71,10 
+102,22 @@ def test_filter_events_by_event_name(self): @override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=False) def test_filter_events_by_properties(self): - _create_person(properties={"email": "tim@posthog.com"}, team=self.team, distinct_ids=["2", "some-random-uid"]) - _create_event(event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Chrome"}) + _create_person( + properties={"email": "tim@posthog.com"}, + team=self.team, + distinct_ids=["2", "some-random-uid"], + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Chrome"}, + ) event2_uuid = _create_event( - event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Safari"} + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Safari"}, ) flush_persons_and_events() @@ -96,18 +139,34 @@ def test_filter_events_by_properties(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertDictEqual( - response.json(), self.validation_error_response("Properties are unparsable!", "invalid_input") + response.json(), + self.validation_error_response("Properties are unparsable!", "invalid_input"), ) def test_filter_events_by_precalculated_cohort(self): Person.objects.create(team_id=self.team.pk, distinct_ids=["p1"], properties={"key": "value"}) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-02T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-02T12:00:00Z", + ) Person.objects.create(team_id=self.team.pk, distinct_ids=["p2"], properties={"key": "value"}) - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-02T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-02T12:00:00Z", + ) Person.objects.create(team_id=self.team.pk, distinct_ids=["p3"], properties={"key_2": "value_2"}) - 
_create_event(team=self.team, event="$pageview", distinct_id="p3", timestamp="2020-01-02T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp="2020-01-02T12:00:00Z", + ) cohort1 = Cohort.objects.create( team=self.team, @@ -134,11 +193,24 @@ def test_filter_by_person(self): immediate=True, ) - _create_event(event="random event", team=self.team, distinct_id="2", properties={"$ip": "8.8.8.8"}) _create_event( - event="random event", team=self.team, distinct_id="some-random-uid", properties={"$ip": "8.8.8.8"} + event="random event", + team=self.team, + distinct_id="2", + properties={"$ip": "8.8.8.8"}, + ) + _create_event( + event="random event", + team=self.team, + distinct_id="some-random-uid", + properties={"$ip": "8.8.8.8"}, + ) + _create_event( + event="random event", + team=self.team, + distinct_id="some-other-one", + properties={"$ip": "8.8.8.8"}, ) - _create_event(event="random event", team=self.team, distinct_id="some-other-one", properties={"$ip": "8.8.8.8"}) flush_persons_and_events() response = self.client.get(f"/api/projects/{self.team.id}/events/?person_id={person.pk}").json() @@ -160,7 +232,10 @@ def test_custom_event_values(self): distinct_id="bla", event=event, team=self.team, - properties={"random_prop": "don't include", "some other prop": "with some text"}, + properties={ + "random_prop": "don't include", + "some other prop": "with some text", + }, ) response = self.client.get(f"/api/projects/{self.team.id}/events/values/?key=custom_event").json() self.assertListEqual(sorted(events), sorted(event["name"] for event in response)) @@ -173,7 +248,10 @@ def test_event_property_values(self): distinct_id="bla", event="random event", team=self.team, - properties={"random_prop": "don't include", "some other prop": "with some text"}, + properties={ + "random_prop": "don't include", + "some other prop": "with some text", + }, ) with freeze_time("2020-01-20 20:00:00"): @@ -183,10 +261,30 @@ def 
test_event_property_values(self): team=self.team, properties={"random_prop": "asdf", "some other prop": "with some text"}, ) - _create_event(distinct_id="bla", event="random event", team=self.team, properties={"random_prop": "asdf"}) - _create_event(distinct_id="bla", event="random event", team=self.team, properties={"random_prop": "qwerty"}) - _create_event(distinct_id="bla", event="random event", team=self.team, properties={"random_prop": True}) - _create_event(distinct_id="bla", event="random event", team=self.team, properties={"random_prop": False}) + _create_event( + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": "asdf"}, + ) + _create_event( + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": "qwerty"}, + ) + _create_event( + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": True}, + ) + _create_event( + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": False}, + ) _create_event( distinct_id="bla", event="random event", @@ -194,18 +292,37 @@ def test_event_property_values(self): properties={"random_prop": {"first_name": "Mary", "last_name": "Smith"}}, ) _create_event( - distinct_id="bla", event="random event", team=self.team, properties={"something_else": "qwerty"} + distinct_id="bla", + event="random event", + team=self.team, + properties={"something_else": "qwerty"}, ) - _create_event(distinct_id="bla", event="random event", team=self.team, properties={"random_prop": 565}) _create_event( - distinct_id="bla", event="random event", team=self.team, properties={"random_prop": ["item1", "item2"]} + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": 565}, ) _create_event( - distinct_id="bla", event="random event", team=self.team, properties={"random_prop": ["item3"]} + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": 
["item1", "item2"]}, + ) + _create_event( + distinct_id="bla", + event="random event", + team=self.team, + properties={"random_prop": ["item3"]}, ) team2 = Organization.objects.bootstrap(None)[2] - _create_event(distinct_id="bla", event="random event", team=team2, properties={"random_prop": "abcd"}) + _create_event( + distinct_id="bla", + event="random event", + team=team2, + properties={"random_prop": "abcd"}, + ) response = self.client.get(f"/api/projects/{self.team.id}/events/values/?key=random_prop").json() keys = [resp["name"].replace(" ", "") for resp in response] @@ -252,7 +369,11 @@ def test_event_property_values(self): def test_before_and_after(self): user = self._create_user("tim") self.client.force_login(user) - _create_person(properties={"email": "tim@posthog.com"}, team=self.team, distinct_ids=["2", "some-random-uid"]) + _create_person( + properties={"email": "tim@posthog.com"}, + team=self.team, + distinct_ids=["2", "some-random-uid"], + ) with freeze_time("2020-01-10"): event1_uuid = _create_event(team=self.team, event="sign up", distinct_id="2") @@ -325,9 +446,9 @@ def test_pagination(self): from posthog.client import sync_execute self.assertEqual( - sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk})[0][ + sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[ 0 - ], + ][0], 250, ) @@ -374,9 +495,9 @@ def test_pagination_bounded_date_range(self): from posthog.client import sync_execute self.assertEqual( - sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk})[0][ + sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[ 0 - ], + ][0], 25, ) @@ -402,7 +523,8 @@ def test_ascending_order_timestamp(self): ).json() self.assertEqual(len(response["results"]), 10) self.assertLess( - parser.parse(response["results"][0]["timestamp"]), 
parser.parse(response["results"][-1]["timestamp"]) + parser.parse(response["results"][0]["timestamp"]), + parser.parse(response["results"][-1]["timestamp"]), ) assert "after=" in response["next"] @@ -418,7 +540,8 @@ def test_default_descending_order_timestamp(self): response = self.client.get(f"/api/projects/{self.team.id}/events/?distinct_id=1&limit=10").json() self.assertEqual(len(response["results"]), 10) self.assertGreater( - parser.parse(response["results"][0]["timestamp"]), parser.parse(response["results"][-1]["timestamp"]) + parser.parse(response["results"][0]["timestamp"]), + parser.parse(response["results"][-1]["timestamp"]), ) assert "before=" in response["next"] @@ -436,7 +559,8 @@ def test_specified_descending_order_timestamp(self): ).json() self.assertEqual(len(response["results"]), 10) self.assertGreater( - parser.parse(response["results"][0]["timestamp"]), parser.parse(response["results"][-1]["timestamp"]) + parser.parse(response["results"][0]["timestamp"]), + parser.parse(response["results"][-1]["timestamp"]), ) assert "before=" in response["next"] @@ -448,7 +572,12 @@ def test_action_no_steps(self): self.assertEqual(len(response.json()["results"]), 0) def test_get_single_action(self): - event1_uuid = _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val"}) + event1_uuid = _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val"}, + ) response = self.client.get(f"/api/projects/{self.team.id}/events/%s/" % event1_uuid) self.assertEqual(response.status_code, 200) self.assertEqual(response.json()["event"], "sign up") @@ -456,17 +585,30 @@ def test_get_single_action(self): def test_events_in_future(self): with freeze_time("2012-01-15T04:01:34.000Z"): - _create_event(team=self.team, event="5th action", distinct_id="2", properties={"$os": "Windows 95"}) + _create_event( + team=self.team, + event="5th action", + distinct_id="2", + properties={"$os": "Windows 95"}, + ) # 
Don't show events more than 5 seconds in the future with freeze_time("2012-01-15T04:01:44.000Z"): - _create_event(team=self.team, event="5th action", distinct_id="2", properties={"$os": "Windows 95"}) + _create_event( + team=self.team, + event="5th action", + distinct_id="2", + properties={"$os": "Windows 95"}, + ) with freeze_time("2012-01-15T04:01:34.000Z"): response = self.client.get(f"/api/projects/{self.team.id}/events/").json() self.assertEqual(len(response["results"]), 1) def test_get_event_by_id(self): _create_person( - properties={"email": "someone@posthog.com"}, team=self.team, distinct_ids=["1"], is_identified=True + properties={"email": "someone@posthog.com"}, + team=self.team, + distinct_ids=["1"], + is_identified=True, ) event_id = _create_event(team=self.team, event="event", distinct_id="1", timestamp=timezone.now()) @@ -484,10 +626,16 @@ def test_get_event_by_id(self): response = self.client.get(f"/api/projects/{self.team.id}/events/123456") # EE will inform the user the ID passed is not a valid UUID - self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST]) + self.assertIn( + response.status_code, + [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST], + ) response = self.client.get(f"/api/projects/{self.team.id}/events/im_a_string_not_an_integer") - self.assertIn(response.status_code, [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST]) + self.assertIn( + response.status_code, + [status.HTTP_404_NOT_FOUND, status.HTTP_400_BAD_REQUEST], + ) def test_limit(self): _create_person( @@ -502,10 +650,23 @@ def test_limit(self): team=self.team, distinct_id="2", properties={"$ip": "8.8.8.8"}, - elements=[Element(tag_name="button", text="something"), Element(tag_name="div")], + elements=[ + Element(tag_name="button", text="something"), + Element(tag_name="div"), + ], + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some-random-uid", + properties={"$ip": "8.8.8.8"}, + ) + _create_event( + 
event="$pageview", + team=self.team, + distinct_id="some-other-one", + properties={"$ip": "8.8.8.8"}, ) - _create_event(event="$pageview", team=self.team, distinct_id="some-random-uid", properties={"$ip": "8.8.8.8"}) - _create_event(event="$pageview", team=self.team, distinct_id="some-other-one", properties={"$ip": "8.8.8.8"}) response = self.client.get(f"/api/projects/{self.team.id}/events/?limit=1").json() self.assertEqual(1, len(response["results"])) @@ -520,16 +681,28 @@ def test_get_events_with_specified_token(self): self.assertNotEqual(user2.team.id, self.team.id) - event1_uuid = _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val"}) - event2_uuid = _create_event(team=user2.team, event="sign up", distinct_id="2", properties={"key": "test_val"}) + event1_uuid = _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val"}, + ) + event2_uuid = _create_event( + team=user2.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val"}, + ) response_team1 = self.client.get(f"/api/projects/{self.team.id}/events/{event1_uuid}/") response_team1_token = self.client.get( - f"/api/projects/{self.team.id}/events/{event1_uuid}/", data={"token": self.team.api_token} + f"/api/projects/{self.team.id}/events/{event1_uuid}/", + data={"token": self.team.api_token}, ) response_team2_event1 = self.client.get( - f"/api/projects/{self.team.id}/events/{event1_uuid}/", data={"token": user2.team.api_token} + f"/api/projects/{self.team.id}/events/{event1_uuid}/", + data={"token": user2.team.api_token}, ) # The feature being tested here is usually used with personal API token auth, @@ -537,7 +710,8 @@ def test_get_events_with_specified_token(self): self.client.force_login(user2) response_team2_event2 = self.client.get( - f"/api/projects/{self.team.id}/events/{event2_uuid}/", data={"token": user2.team.api_token} + f"/api/projects/{self.team.id}/events/{event2_uuid}/", + data={"token": 
user2.team.api_token}, ) self.assertEqual(response_team1.status_code, status.HTTP_200_OK) @@ -622,7 +796,10 @@ def test_filter_events_by_being_after_properties_with_date_type(self): ).json() self.assertEqual(len(response["results"]), 2) - self.assertEqual([r["event"] for r in response["results"]], ["should_be_included", "should_be_included"]) + self.assertEqual( + [r["event"] for r in response["results"]], + ["should_be_included", "should_be_included"], + ) def test_filter_events_by_being_before_properties_with_date_type(self): journeys_for( diff --git a/posthog/api/test/test_event_definition.py b/posthog/api/test/test_event_definition.py index f9727a2d3d955..9b978490cda7e 100644 --- a/posthog/api/test/test_event_definition.py +++ b/posthog/api/test/test_event_definition.py @@ -18,7 +18,6 @@ @freeze_time("2020-01-02") class TestEventDefinitionAPI(APIBaseTest): - demo_team: Team = None # type: ignore EXPECTED_EVENT_DEFINITIONS: List[Dict[str, Any]] = [ @@ -56,10 +55,12 @@ def test_list_event_definitions(self): for item in self.EXPECTED_EVENT_DEFINITIONS: response_item: Dict[str, Any] = next( - (_i for _i in response.json()["results"] if _i["name"] == item["name"]), {} + (_i for _i in response.json()["results"] if _i["name"] == item["name"]), + {}, ) self.assertAlmostEqual( - (dateutil.parser.isoparse(response_item["created_at"]) - timezone.now()).total_seconds(), 0 + (dateutil.parser.isoparse(response_item["created_at"]) - timezone.now()).total_seconds(), + 0, ) # Test ordering @@ -77,7 +78,11 @@ def test_delete_event_definition(self, mock_capture): self.user.distinct_id, "event definition deleted", properties={"name": "test_event"}, - groups={"instance": ANY, "organization": str(self.organization.id), "project": str(self.demo_team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.organization.id), + "project": str(self.demo_team.uuid), + }, ) activity_log: Optional[ActivityLog] = ActivityLog.objects.first() @@ -133,7 +138,6 @@ def 
test_cant_see_event_definitions_for_another_team(self): self.assertEqual(response.json(), self.permission_denied_response()) def test_query_event_definitions(self): - # Regular search response = self.client.get("/api/projects/@current/event_definitions/?search=app") self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/posthog/api/test/test_exports.py b/posthog/api/test/test_exports.py index 448afe2a06da9..e96c192a43c9d 100644 --- a/posthog/api/test/test_exports.py +++ b/posthog/api/test/test_exports.py @@ -44,7 +44,10 @@ def teardown_method(self, method) -> None: bucket = s3.Bucket(OBJECT_STORAGE_BUCKET) bucket.objects.filter(Prefix=TEST_ROOT_BUCKET).delete() - insight_filter_dict = {"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + insight_filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } @classmethod def setUpTestData(cls): @@ -64,7 +67,8 @@ def setUpTestData(cls): @patch("posthog.api.exports.exporter") def test_can_create_new_valid_export_dashboard(self, mock_exporter_task) -> None: response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "image/png", "dashboard": self.dashboard.id} + f"/api/projects/{self.team.id}/exports", + {"export_format": "image/png", "dashboard": self.dashboard.id}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) data = response.json() @@ -128,7 +132,11 @@ def test_swallow_missing_schema_and_allow_front_end_to_poll(self, mock_exporter_ }, }, ) - self.assertEqual(response.status_code, status.HTTP_201_CREATED, msg=f"was not HTTP 201 😱 - {response.json()}") + self.assertEqual( + response.status_code, + status.HTTP_201_CREATED, + msg=f"was not HTTP 201 😱 - {response.json()}", + ) data = response.json() mock_exporter_task.export_asset.delay.assert_called_once_with(data["id"]) @@ -136,7 +144,8 @@ def test_swallow_missing_schema_and_allow_front_end_to_poll(self, 
mock_exporter_ @freeze_time("2021-08-25T22:09:14.252Z") def test_can_create_new_valid_export_insight(self, mock_exporter_task) -> None: response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "application/pdf", "insight": self.insight.id} + f"/api/projects/{self.team.id}/exports", + {"export_format": "application/pdf", "insight": self.insight.id}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) data = response.json() @@ -162,7 +171,10 @@ def test_can_create_new_valid_export_insight(self, mock_exporter_task) -> None: insight_id=self.insight.id, expected=[ { - "user": {"first_name": self.user.first_name, "email": self.user.email}, + "user": { + "first_name": self.user.first_name, + "email": self.user.email, + }, "activity": "exported", "created_at": "2021-08-25T22:09:14.252000Z", "scope": "Insight", @@ -218,7 +230,8 @@ def test_errors_if_bad_format(self) -> None: def test_will_respond_even_if_task_timesout(self, mock_exporter_task) -> None: mock_exporter_task.export_asset.delay.return_value.get.side_effect = celery.exceptions.TimeoutError("timed out") response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "application/pdf", "insight": self.insight.id} + f"/api/projects/{self.team.id}/exports", + {"export_format": "application/pdf", "insight": self.insight.id}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -226,7 +239,8 @@ def test_will_respond_even_if_task_timesout(self, mock_exporter_task) -> None: def test_will_error_if_export_unsupported(self, mock_exporter_task) -> None: mock_exporter_task.export_asset.delay.return_value.get.side_effect = NotImplementedError("not implemented") response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "application/pdf", "insight": self.insight.id} + f"/api/projects/{self.team.id}/exports", + {"export_format": "application/pdf", "insight": self.insight.id}, ) 
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -241,7 +255,8 @@ def test_will_error_if_export_unsupported(self, mock_exporter_task) -> None: def test_will_error_if_dashboard_missing(self) -> None: response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "application/pdf", "dashboard": 54321} + f"/api/projects/{self.team.id}/exports", + {"export_format": "application/pdf", "dashboard": 54321}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -259,7 +274,12 @@ def test_will_error_if_export_contains_other_team_dashboard(self) -> None: organization=self.organization, api_token=self.CONFIG_API_TOKEN + "2", test_account_filters=[ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ], ) other_dashboard = Dashboard.objects.create( @@ -286,15 +306,23 @@ def test_will_error_if_export_contains_other_team_insight(self) -> None: organization=self.organization, api_token=self.CONFIG_API_TOKEN + "2", test_account_filters=[ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ], ) other_insight = Insight.objects.create( - filters=Filter(data=self.insight_filter_dict).to_dict(), team=other_team, created_by=self.user + filters=Filter(data=self.insight_filter_dict).to_dict(), + team=other_team, + created_by=self.user, ) response = self.client.post( - f"/api/projects/{self.team.id}/exports", {"export_format": "application/pdf", "insight": other_insight.id} + f"/api/projects/{self.team.id}/exports", + {"export_format": "application/pdf", "insight": other_insight.id}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -310,16 +338,29 @@ def 
test_will_error_if_export_contains_other_team_insight(self) -> None: @patch("posthog.tasks.exports.csv_exporter.requests.request") def test_can_download_a_csv(self, patched_request) -> None: with self.settings(SITE_URL="http://testserver"): - - _create_event(event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Chrome"}) + _create_event( + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Chrome"}, + ) expected_event_id = _create_event( - event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Safari"} + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Safari"}, ) second_expected_event_id = _create_event( - event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Safari"} + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Safari"}, ) third_expected_event_id = _create_event( - event="event_name", team=self.team, distinct_id="2", properties={"$browser": "Safari"} + event="event_name", + team=self.team, + distinct_id="2", + properties={"$browser": "Safari"}, ) flush_persons_and_events() @@ -346,7 +387,9 @@ def requests_side_effect(*args, **kwargs): }, ) self.assertEqual( - response.status_code, status.HTTP_201_CREATED, msg=f"was not HTTP 201 😱 - {response.json()}" + response.status_code, + status.HTTP_201_CREATED, + msg=f"was not HTTP 201 😱 - {response.json()}", ) instance = response.json() diff --git a/posthog/api/test/test_feature_flag.py b/posthog/api/test/test_feature_flag.py index b243b46200764..0c6581a389561 100644 --- a/posthog/api/test/test_feature_flag.py +++ b/posthog/api/test/test_feature_flag.py @@ -64,7 +64,8 @@ def test_cant_create_flag_with_duplicate_key(self): count = FeatureFlag.objects.count() # Make sure the endpoint works with and without the trailing slash response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags", {"name": "Beta feature", "key": "red_button"} + 
f"/api/projects/{self.team.id}/feature_flags", + {"name": "Beta feature", "key": "red_button"}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -80,7 +81,11 @@ def test_cant_create_flag_with_duplicate_key(self): def test_cant_update_flag_with_duplicate_key(self): another_feature_flag = FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="some feature", key="some-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="some feature", + key="some-feature", + created_by=self.user, ) response = self.client.patch( f"/api/projects/{self.team.id}/feature_flags/{another_feature_flag.pk}", @@ -111,7 +116,11 @@ def test_cant_update_flag_with_duplicate_key(self): def test_is_simple_flag(self): feature_flag = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": "Beta feature", "key": "beta-feature", "filters": {"groups": [{"rollout_percentage": 65}]}}, + data={ + "name": "Beta feature", + "key": "beta-feature", + "filters": {"groups": [{"rollout_percentage": 65}]}, + }, format="json", ).json() self.assertTrue(feature_flag["is_simple_flag"]) @@ -128,7 +137,12 @@ def test_is_not_simple_flag(self): { "rollout_percentage": 65, "properties": [ - {"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"} + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } ], } ] @@ -145,7 +159,10 @@ def test_is_simple_flag_groups(self, mock_capture): data={ "name": "Beta feature", "key": "beta-feature", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 65}]}, + "filters": { + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 65}], + }, }, format="json", ).json() @@ -171,10 +188,13 @@ def test_is_simple_flag_groups(self, mock_capture): @freeze_time("2021-08-25T22:09:14.252Z") @patch("posthog.api.feature_flag.report_user_action") def 
test_create_feature_flag(self, mock_capture): - response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - {"name": "Alpha feature", "key": "alpha-feature", "filters": {"groups": [{"rollout_percentage": 50}]}}, + { + "name": "Alpha feature", + "key": "alpha-feature", + "filters": {"groups": [{"rollout_percentage": 50}]}, + }, format="json", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -221,9 +241,10 @@ def test_create_feature_flag(self, mock_capture): @patch("posthog.api.feature_flag.report_user_action") def test_create_minimal_feature_flag(self, mock_capture): - response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags/", {"key": "omega-feature"}, format="json" + f"/api/projects/{self.team.id}/feature_flags/", + {"key": "omega-feature"}, + format="json", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.json()["key"], "omega-feature") @@ -251,7 +272,6 @@ def test_create_minimal_feature_flag(self, mock_capture): @patch("posthog.api.feature_flag.report_user_action") def test_create_multivariate_feature_flag(self, mock_capture): - response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", { @@ -261,9 +281,21 @@ def test_create_multivariate_feature_flag(self, mock_capture): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -301,9 +333,21 @@ def 
test_cant_create_multivariate_feature_flag_with_variant_rollout_lt_100(self) "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 0}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 0, + }, ] }, }, @@ -313,7 +357,8 @@ def test_cant_create_multivariate_feature_flag_with_variant_rollout_lt_100(self) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json().get("type"), "validation_error") self.assertEqual( - response.json().get("detail"), "Invalid variant definitions: Variant rollout percentages must sum to 100." 
+ response.json().get("detail"), + "Invalid variant definitions: Variant rollout percentages must sum to 100.", ) def test_cant_create_multivariate_feature_flag_with_variant_rollout_gt_100(self): @@ -326,9 +371,21 @@ def test_cant_create_multivariate_feature_flag_with_variant_rollout_gt_100(self) "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 50}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 50, + }, ] }, }, @@ -338,7 +395,8 @@ def test_cant_create_multivariate_feature_flag_with_variant_rollout_gt_100(self) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json().get("type"), "validation_error") self.assertEqual( - response.json().get("detail"), "Invalid variant definitions: Variant rollout percentages must sum to 100." 
+ response.json().get("detail"), + "Invalid variant definitions: Variant rollout percentages must sum to 100.", ) def test_cant_create_feature_flag_without_key(self): @@ -347,7 +405,12 @@ def test_cant_create_feature_flag_without_key(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), - {"type": "validation_error", "code": "required", "detail": "This field is required.", "attr": "key"}, + { + "type": "validation_error", + "code": "required", + "detail": "This field is required.", + "attr": "key", + }, ) self.assertEqual(FeatureFlag.objects.count(), count) @@ -358,12 +421,30 @@ def test_cant_create_multivariate_feature_flag_with_invalid_variant_overrides(se "name": "Multivariate feature", "key": "multivariate-feature", "filters": { - "groups": [{"properties": [], "rollout_percentage": None, "variant": "unknown-variant"}], + "groups": [ + { + "properties": [], + "rollout_percentage": None, + "variant": "unknown-variant", + } + ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -372,7 +453,10 @@ def test_cant_create_multivariate_feature_flag_with_invalid_variant_overrides(se ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json().get("type"), "validation_error") - self.assertEqual(response.json().get("detail"), "Filters are not valid (variant override does not exist)") + self.assertEqual( + response.json().get("detail"), + "Filters are not valid (variant override does 
not exist)", + ) def test_cant_update_multivariate_feature_flag_with_invalid_variant_overrides(self): response = self.client.post( @@ -381,12 +465,30 @@ def test_cant_update_multivariate_feature_flag_with_invalid_variant_overrides(se "name": "Multivariate feature", "key": "multivariate-feature", "filters": { - "groups": [{"properties": [], "rollout_percentage": None, "variant": "second-variant"}], + "groups": [ + { + "properties": [], + "rollout_percentage": None, + "variant": "second-variant", + } + ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -401,12 +503,30 @@ def test_cant_update_multivariate_feature_flag_with_invalid_variant_overrides(se f"/api/projects/{self.team.id}/feature_flags/{feature_flag_id}", { "filters": { - "groups": [{"properties": [], "rollout_percentage": None, "variant": "unknown-variant"}], + "groups": [ + { + "properties": [], + "rollout_percentage": None, + "variant": "unknown-variant", + } + ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 0}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 0, + }, ] }, }, @@ 
-415,7 +535,10 @@ def test_cant_update_multivariate_feature_flag_with_invalid_variant_overrides(se ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json().get("type"), "validation_error") - self.assertEqual(response.json().get("detail"), "Filters are not valid (variant override does not exist)") + self.assertEqual( + response.json().get("detail"), + "Filters are not valid (variant override does not exist)", + ) @patch("posthog.api.feature_flag.report_user_action") def test_updating_feature_flag(self, mock_capture): @@ -439,7 +562,12 @@ def test_updating_feature_flag(self, mock_capture): { "rollout_percentage": 65, "properties": [ - {"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"} + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } ], } ] @@ -474,7 +602,10 @@ def test_updating_feature_flag(self, mock_capture): flag_id, [ { - "user": {"first_name": self.user.first_name, "email": self.user.email}, + "user": { + "first_name": self.user.first_name, + "email": self.user.email, + }, "activity": "updated", "created_at": "2021-08-25T22:19:14.252000Z", "scope": "FeatureFlag", @@ -517,7 +648,10 @@ def test_updating_feature_flag(self, mock_capture): }, }, { - "user": {"first_name": self.user.first_name, "email": self.user.email}, + "user": { + "first_name": self.user.first_name, + "email": self.user.email, + }, "activity": "created", "created_at": "2021-08-25T22:09:14.252000Z", "scope": "FeatureFlag", @@ -579,7 +713,10 @@ def test_get_feature_flag_activity(self): flag_id, [ { - "user": {"first_name": new_user.first_name, "email": new_user.email}, + "user": { + "first_name": new_user.first_name, + "email": new_user.email, + }, "activity": "updated", "created_at": "2021-08-25T22:19:14.252000Z", "scope": "FeatureFlag", @@ -601,7 +738,10 @@ def test_get_feature_flag_activity(self): }, }, { - "user": {"first_name": new_user.first_name, "email": 
new_user.email}, + "user": { + "first_name": new_user.first_name, + "email": new_user.email, + }, "activity": "created", "created_at": "2021-08-25T22:09:14.252000Z", "scope": "FeatureFlag", @@ -650,7 +790,8 @@ def test_get_feature_flag_activity_for_all_flags(self): frozen_datetime.tick(delta=datetime.timedelta(minutes=10)) second_create_response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags/", {"name": "a second feature flag", "key": "flag-two"} + f"/api/projects/{self.team.id}/feature_flags/", + {"name": "a second feature flag", "key": "flag-two"}, ) self.assertEqual(second_create_response.status_code, status.HTTP_201_CREATED) @@ -660,15 +801,27 @@ def test_get_feature_flag_activity_for_all_flags(self): flag_id=None, expected=[ { - "user": {"first_name": new_user.first_name, "email": new_user.email}, + "user": { + "first_name": new_user.first_name, + "email": new_user.email, + }, "activity": "created", "created_at": "2021-08-25T22:29:14.252000Z", "scope": "FeatureFlag", "item_id": str(second_flag_id), - "detail": {"changes": None, "trigger": None, "type": None, "name": "flag-two", "short_id": None}, + "detail": { + "changes": None, + "trigger": None, + "type": None, + "name": "flag-two", + "short_id": None, + }, }, { - "user": {"first_name": new_user.first_name, "email": new_user.email}, + "user": { + "first_name": new_user.first_name, + "email": new_user.email, + }, "activity": "updated", "created_at": "2021-08-25T22:19:14.252000Z", "scope": "FeatureFlag", @@ -690,7 +843,10 @@ def test_get_feature_flag_activity_for_all_flags(self): }, }, { - "user": {"first_name": new_user.first_name, "email": new_user.email}, + "user": { + "first_name": new_user.first_name, + "email": new_user.email, + }, "activity": "created", "created_at": "2021-08-25T22:09:14.252000Z", "scope": "FeatureFlag", @@ -785,37 +941,54 @@ def test_get_feature_flag_activity_only_from_own_team(self): # user in org 1 gets activity self.client.force_login(org_one_user) 
self._get_feature_flag_activity( - flag_id=team_one_flag_one, team_id=org_one_team.id, expected_status=status.HTTP_200_OK + flag_id=team_one_flag_one, + team_id=org_one_team.id, + expected_status=status.HTTP_200_OK, ) self._get_feature_flag_activity( - flag_id=team_one_flag_two, team_id=org_one_team.id, expected_status=status.HTTP_200_OK + flag_id=team_one_flag_two, + team_id=org_one_team.id, + expected_status=status.HTTP_200_OK, ) self._get_feature_flag_activity( - flag_id=team_two_flag_one, team_id=org_one_team.id, expected_status=status.HTTP_404_NOT_FOUND + flag_id=team_two_flag_one, + team_id=org_one_team.id, + expected_status=status.HTTP_404_NOT_FOUND, ) self._get_feature_flag_activity( - flag_id=team_two_flag_two, team_id=org_one_team.id, expected_status=status.HTTP_404_NOT_FOUND + flag_id=team_two_flag_two, + team_id=org_one_team.id, + expected_status=status.HTTP_404_NOT_FOUND, ) # user in org 2 gets activity self.client.force_login(org_two_user) self._get_feature_flag_activity( - flag_id=team_one_flag_two, team_id=org_two_team.id, expected_status=status.HTTP_404_NOT_FOUND + flag_id=team_one_flag_two, + team_id=org_two_team.id, + expected_status=status.HTTP_404_NOT_FOUND, ) self._get_feature_flag_activity( - flag_id=team_one_flag_two, team_id=org_two_team.id, expected_status=status.HTTP_404_NOT_FOUND + flag_id=team_one_flag_two, + team_id=org_two_team.id, + expected_status=status.HTTP_404_NOT_FOUND, ) self._get_feature_flag_activity( - flag_id=team_two_flag_one, team_id=org_two_team.id, expected_status=status.HTTP_200_OK + flag_id=team_two_flag_one, + team_id=org_two_team.id, + expected_status=status.HTTP_200_OK, ) self._get_feature_flag_activity( - flag_id=team_two_flag_two, team_id=org_two_team.id, expected_status=status.HTTP_200_OK + flag_id=team_two_flag_two, + team_id=org_two_team.id, + expected_status=status.HTTP_200_OK, ) def test_paging_all_feature_flag_activity(self): for x in range(15): create_response = self.client.post( - 
f"/api/projects/{self.team.id}/feature_flags/", {"name": f"feature flag {x}", "key": f"{x}"} + f"/api/projects/{self.team.id}/feature_flags/", + {"name": f"feature flag {x}", "key": f"{x}"}, ) self.assertEqual(create_response.status_code, status.HTTP_201_CREATED) @@ -841,7 +1014,8 @@ def test_paging_all_feature_flag_activity(self): second_page_json = second_page_response.json() self.assertEqual( - [log_item["detail"]["name"] for log_item in second_page_json["results"]], ["4", "3", "2", "1", "0"] + [log_item["detail"]["name"] for log_item in second_page_json["results"]], + ["4", "3", "2", "1", "0"], ) self.assertEqual(second_page_json["next"], None) self.assertEqual( @@ -856,7 +1030,9 @@ def test_paging_specific_feature_flag_activity(self): for x in range(1, 15): update_response = self.client.patch( - f"/api/projects/{self.team.id}/feature_flags/{flag_id}", {"key": str(x)}, format="json" + f"/api/projects/{self.team.id}/feature_flags/{flag_id}", + {"key": str(x)}, + format="json", ) self.assertEqual(update_response.status_code, status.HTTP_200_OK) @@ -914,7 +1090,8 @@ def test_creating_a_feature_flag_with_same_team_and_key_after_deleting(self): FeatureFlag.objects.create(team=self.team, created_by=self.user, key="alpha-feature", deleted=True) response = self.client.post( - f"/api/projects/{self.team.id}/feature_flags/", {"name": "Alpha feature", "key": "alpha-feature"} + f"/api/projects/{self.team.id}/feature_flags/", + {"name": "Alpha feature", "key": "alpha-feature"}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) instance = FeatureFlag.objects.get(id=response.json()["id"]) @@ -926,7 +1103,9 @@ def test_updating_a_feature_flag_with_same_team_and_key_of_a_deleted_one(self): instance = FeatureFlag.objects.create(team=self.team, created_by=self.user, key="beta-feature") response = self.client.patch( - f"/api/projects/{self.team.id}/feature_flags/{instance.pk}", {"key": "alpha-feature"}, format="json" + 
f"/api/projects/{self.team.id}/feature_flags/{instance.pk}", + {"key": "alpha-feature"}, + format="json", ) self.assertEqual(response.status_code, status.HTTP_200_OK) instance.refresh_from_db() @@ -935,7 +1114,11 @@ def test_updating_a_feature_flag_with_same_team_and_key_of_a_deleted_one(self): def test_my_flags_is_not_nplus1(self) -> None: self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -946,7 +1129,11 @@ def test_my_flags_is_not_nplus1(self) -> None: for i in range(1, 4): self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag_{i}", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag_{i}", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -957,7 +1144,11 @@ def test_my_flags_is_not_nplus1(self) -> None: def test_getting_flags_is_not_nplus1(self) -> None: self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag_0", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -968,7 +1159,11 @@ def test_getting_flags_is_not_nplus1(self) -> None: for i in range(1, 5): self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag_{i}", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag_{i}", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -987,9 +1182,21 @@ def test_my_flags(self, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": "first-variant", "name": 
"First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1046,7 +1253,10 @@ def test_my_flags_groups(self, mock_capture): { "name": "groups flag", "key": "groups-flag", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 100}]}, + "filters": { + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 100}], + }, }, format="json", ) @@ -1060,7 +1270,8 @@ def test_my_flags_groups(self, mock_capture): self.assertEqual(groups_flag["value"], False) response = self.client.get( - f"/api/projects/{self.team.id}/feature_flags/my_flags", data={"groups": json.dumps({"organization": "7"})} + f"/api/projects/{self.team.id}/feature_flags/my_flags", + data={"groups": json.dumps({"organization": "7"})}, ) groups_flag = response.json()[0] self.assertEqual(groups_flag["feature_flag"]["key"], "groups-flag") @@ -1069,10 +1280,13 @@ def test_my_flags_groups(self, mock_capture): @freeze_time("2021-08-25T22:09:14.252Z") @patch("posthog.api.feature_flag.report_user_action") def test_create_feature_flag_usage_dashboard(self, mock_capture): - response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - {"name": "Alpha feature", "key": "alpha-feature", "filters": {"groups": [{"rollout_percentage": 50}]}}, + { + "name": "Alpha feature", + "key": "alpha-feature", + "filters": {"groups": [{"rollout_percentage": 50}]}, + }, format="json", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -1085,7 +1299,8 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): 
self.assertEqual(dashboard.name, "Generated Dashboard: alpha-feature Usage") self.assertEqual( - dashboard.description, "This dashboard was generated by the feature flag with key (alpha-feature)" + dashboard.description, + "This dashboard was generated by the feature flag with key (alpha-feature)", ) self.assertEqual(dashboard.filters, {"date_from": "-30d"}) self.assertEqual(len(tiles), 2) @@ -1093,7 +1308,13 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): self.assertEqual( tiles[0].insight.filters, { - "events": [{"id": "$feature_flag_called", "name": "$feature_flag_called", "type": "events"}], + "events": [ + { + "id": "$feature_flag_called", + "name": "$feature_flag_called", + "type": "events", + } + ], "display": "ActionsLineGraph", "insight": "TRENDS", "interval": "day", @@ -1102,7 +1323,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "$feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + } + ], + } ], }, "breakdown_type": "event", @@ -1114,7 +1344,12 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): tiles[1].insight.filters, { "events": [ - {"id": "$feature_flag_called", "math": "dau", "name": "$feature_flag_called", "type": "events"} + { + "id": "$feature_flag_called", + "math": "dau", + "name": "$feature_flag_called", + "type": "events", + } ], "display": "ActionsTable", "insight": "TRENDS", @@ -1124,7 +1359,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "$feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + } + ], + } ], }, "breakdown_type": "event", @@ -1149,7 +1393,8 @@ 
def test_create_feature_flag_usage_dashboard(self, mock_capture): self.assertEqual(dashboard.name, "Generated Dashboard: alpha-feature Usage") self.assertEqual( - dashboard.description, "This dashboard was generated by the feature flag with key (alpha-feature)" + dashboard.description, + "This dashboard was generated by the feature flag with key (alpha-feature)", ) self.assertEqual(dashboard.filters, {"date_from": "-30d"}) self.assertEqual(len(tiles), 4) @@ -1157,7 +1402,13 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): self.assertEqual( tiles[0].insight.filters, { - "events": [{"id": "$feature_flag_called", "name": "$feature_flag_called", "type": "events"}], + "events": [ + { + "id": "$feature_flag_called", + "name": "$feature_flag_called", + "type": "events", + } + ], "display": "ActionsLineGraph", "insight": "TRENDS", "interval": "day", @@ -1166,7 +1417,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "$feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + } + ], + } ], }, "breakdown_type": "event", @@ -1178,7 +1438,12 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): tiles[1].insight.filters, { "events": [ - {"id": "$feature_flag_called", "math": "dau", "name": "$feature_flag_called", "type": "events"} + { + "id": "$feature_flag_called", + "math": "dau", + "name": "$feature_flag_called", + "type": "events", + } ], "display": "ActionsTable", "insight": "TRENDS", @@ -1188,7 +1453,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "$feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "$feature_flag", + "type": "event", + "value": "alpha-feature", + } 
+ ], + } ], }, "breakdown_type": "event", @@ -1202,7 +1476,11 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): tiles[2].insight.filters, { "events": [ - {"id": "$feature_interaction", "name": "Feature Interaction - Total", "type": "events"}, + { + "id": "$feature_interaction", + "name": "Feature Interaction - Total", + "type": "events", + }, { "id": "$feature_interaction", "math": "dau", @@ -1217,7 +1495,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "feature_flag", + "type": "event", + "value": "alpha-feature", + } + ], + } ], }, "filter_test_accounts": False, @@ -1228,8 +1515,17 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): tiles[3].insight.filters, { "events": [ - {"id": "$feature_view", "name": "Feature View - Total", "type": "events"}, - {"id": "$feature_view", "math": "dau", "name": "Feature View - Unique users", "type": "events"}, + { + "id": "$feature_view", + "name": "Feature View - Total", + "type": "events", + }, + { + "id": "$feature_view", + "math": "dau", + "name": "Feature View - Unique users", + "type": "events", + }, ], "display": "ActionsLineGraph", "insight": "TRENDS", @@ -1238,7 +1534,16 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "feature_flag", "type": "event", "value": "alpha-feature"}]} + { + "type": "AND", + "values": [ + { + "key": "feature_flag", + "type": "event", + "value": "alpha-feature", + } + ], + } ], }, "filter_test_accounts": False, @@ -1248,10 +1553,13 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture): @freeze_time("2021-08-25T22:09:14.252Z") @patch("posthog.api.feature_flag.report_user_action") def test_dashboard_enrichment_fails_if_already_enriched(self, 
mock_capture): - response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - {"name": "Alpha feature", "key": "alpha-feature", "filters": {"groups": [{"rollout_percentage": 50}]}}, + { + "name": "Alpha feature", + "key": "alpha-feature", + "filters": {"groups": [{"rollout_percentage": 50}]}, + }, format="json", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -1275,14 +1583,20 @@ def test_dashboard_enrichment_fails_if_already_enriched(self, mock_capture): format="json", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) - self.assertEqual(response.json(), {"error": "Usage dashboard already has enriched data", "success": False}) + self.assertEqual( + response.json(), + {"error": "Usage dashboard already has enriched data", "success": False}, + ) @patch("posthog.api.feature_flag.report_user_action") def test_dashboard_enrichment_fails_if_no_enriched_data(self, mock_capture): - response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - {"name": "Alpha feature", "key": "alpha-feature", "filters": {"groups": [{"rollout_percentage": 50}]}}, + { + "name": "Alpha feature", + "key": "alpha-feature", + "filters": {"groups": [{"rollout_percentage": 50}]}, + }, format="json", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -1296,7 +1610,11 @@ def test_dashboard_enrichment_fails_if_no_enriched_data(self, mock_capture): ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( - response.json(), {"error": "No enriched analytics available for this feature flag", "success": False} + response.json(), + { + "error": "No enriched analytics available for this feature flag", + "success": False, + }, ) @patch("posthog.api.feature_flag.report_user_action") @@ -1314,9 +1632,21 @@ def test_local_evaluation(self, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", 
"rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1329,7 +1659,10 @@ def test_local_evaluation(self, mock_capture): { "name": "Group feature", "key": "group-feature", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 21}]}, + "filters": { + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 21}], + }, }, format="json", ) @@ -1386,9 +1719,21 @@ def test_local_evaluation(self, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1404,7 +1749,10 @@ def test_local_evaluation(self, mock_capture): "key": "beta-feature", "filters": { "groups": [ - {"properties": [{"key": "beta-property", "value": "beta-value"}], "rollout_percentage": 51} + { + "properties": [{"key": "beta-property", "value": "beta-value"}], + "rollout_percentage": 51, + } ] }, "deleted": False, @@ -1417,7 +1765,10 @@ def test_local_evaluation(self, mock_capture): { "name": "Group feature", "key": "group-feature", - "filters": {"groups": [{"rollout_percentage": 21}], 
"aggregation_group_type_index": 0}, + "filters": { + "groups": [{"rollout_percentage": 21}], + "aggregation_group_type_index": 0, + }, "deleted": False, "active": True, "ensure_experience_continuity": False, @@ -1440,8 +1791,16 @@ def test_local_evaluation_for_cohorts(self, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, ], } ], @@ -1459,14 +1818,32 @@ def test_local_evaluation_for_cohorts(self, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1497,19 +1874,43 @@ def test_local_evaluation_for_cohorts(self, mock_capture): "filters": { "groups": [ { - "properties": [{"key": "$some_prop", "type": "person", "value": "nomatchihope"}], + "properties": [ + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + } + ], "rollout_percentage": 20, }, { - "properties": [{"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}], + "properties": [ + { + "key": "$some_prop2", + "type": "person", + "value": "nomatchihope2", + } + 
], "rollout_percentage": 20, }, ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1533,7 +1934,11 @@ def test_local_evaluation_for_cohorts_with_variant_overrides(self, mock_capture) { "type": "AND", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, ], } ], @@ -1551,13 +1956,24 @@ def test_local_evaluation_for_cohorts_with_variant_overrides(self, mock_capture) "groups": [ { "variant": "test", - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], "rollout_percentage": 100, }, { "variant": "test", "properties": [ - {"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"} + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } ], "rollout_percentage": 100, }, @@ -1597,13 +2013,24 @@ def test_local_evaluation_for_cohorts_with_variant_overrides(self, mock_capture) "groups": [ { "variant": "test", - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], "rollout_percentage": 100, }, { "variant": "test", "properties": [ - {"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"} + { + "key": "email", + "type": 
"person", + "value": "@posthog.com", + "operator": "icontains", + } ], "rollout_percentage": 100, }, @@ -1641,14 +2068,32 @@ def test_local_evaluation_for_static_cohorts(self, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1678,14 +2123,32 @@ def test_local_evaluation_for_static_cohorts(self, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1714,8 +2177,16 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": 
"nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, ], } ], @@ -1733,9 +2204,22 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "value": "nomatchihope", "type": "person"}, - {"key": "$some_prop2", "value": "nomatchihope2", "type": "person"}, - {"key": "id", "value": cohort_valid_for_ff.pk, "type": "cohort", "negation": True}, + { + "key": "$some_prop", + "value": "nomatchihope", + "type": "person", + }, + { + "key": "$some_prop2", + "value": "nomatchihope2", + "type": "person", + }, + { + "key": "id", + "value": cohort_valid_for_ff.pk, + "type": "cohort", + "negation": True, + }, ], } ], @@ -1758,9 +2242,21 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1777,7 +2273,13 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): "groups": [ { "rollout_percentage": 20, - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], } ], }, @@ -1810,8 +2312,16 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", 
"type": "person", "value": "nomatchihope"}, - {"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, + { + "key": "$some_prop2", + "type": "person", + "value": "nomatchihope2", + }, ], } ], @@ -1822,9 +2332,22 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): { "type": "OR", "values": [ - {"key": "$some_prop", "type": "person", "value": "nomatchihope"}, - {"key": "$some_prop2", "type": "person", "value": "nomatchihope2"}, - {"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk, "negation": True}, + { + "key": "$some_prop", + "type": "person", + "value": "nomatchihope", + }, + { + "key": "$some_prop2", + "type": "person", + "value": "nomatchihope2", + }, + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + "negation": True, + }, ], } ], @@ -1845,9 +2368,21 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): ], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, }, @@ -1865,7 +2400,13 @@ def test_local_evaluation_for_arbitrary_cohorts(self, mock_capture): "filters": { "groups": [ { - "properties": [{"key": "id", "type": "cohort", "value": cohort_valid_for_ff.pk}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": cohort_valid_for_ff.pk, + } + ], "rollout_percentage": 20, }, ], @@ -1924,7 +2465,10 @@ def test_local_evaluation_billing_analytics(self): HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) 
self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), {b"165192618": b"1"}) + self.assertEqual( + client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), + {b"165192618": b"1"}, + ) for _ in range(5): response = self.client.get( @@ -1933,7 +2477,10 @@ def test_local_evaluation_billing_analytics(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), {b"165192618": b"6"}) + self.assertEqual( + client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), + {b"165192618": b"6"}, + ) @patch("posthog.models.feature_flag.flag_analytics.CACHE_BUCKET_SIZE", 10) def test_local_evaluation_billing_analytics_for_regular_feature_flag_list(self): @@ -1990,7 +2537,10 @@ def test_local_evaluation_billing_analytics_for_regular_feature_flag_list(self): HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), {b"165192618": b"1"}) + self.assertEqual( + client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), + {b"165192618": b"1"}, + ) for _ in range(4): response = self.client.get( @@ -2006,7 +2556,10 @@ def test_local_evaluation_billing_analytics_for_regular_feature_flag_list(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), {b"165192618": b"6"}) + self.assertEqual( + client.hgetall(f"posthog:local_evaluation_requests:{self.team.pk}"), + {b"165192618": b"6"}, + ) @patch("posthog.api.feature_flag.report_user_action") def test_evaluation_reasons(self, mock_capture): @@ -2028,9 +2581,21 @@ def test_evaluation_reasons(self, mock_capture): "groups": [{"rollout_percentage": 20}], "multivariate": { "variants": [ - {"key": 
"first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ], }, }, @@ -2211,7 +2776,15 @@ def test_evaluation_reasons(self, mock_capture): def test_validation_person_properties(self): person_request = self._create_flag_with_properties( - "person-flag", [{"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"}] + "person-flag", + [ + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } + ], ) self.assertEqual(person_request.status_code, status.HTTP_201_CREATED) @@ -2222,7 +2795,9 @@ def test_validation_person_properties(self): self.assertEqual(cohort_request.status_code, status.HTTP_201_CREATED) event_request = self._create_flag_with_properties( - "illegal-event-flag", [{"key": "id", "value": 5}], expected_status=status.HTTP_400_BAD_REQUEST + "illegal-event-flag", + [{"key": "id", "value": 5}], + expected_status=status.HTTP_400_BAD_REQUEST, ) self.assertEqual( event_request.json(), @@ -2236,7 +2811,14 @@ def test_validation_person_properties(self): groups_request = self._create_flag_with_properties( "illegal-groups-flag", - [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], expected_status=status.HTTP_400_BAD_REQUEST, ) self.assertEqual( @@ -2251,7 +2833,9 @@ def test_validation_person_properties(self): def test_creating_feature_flag_with_non_existant_cohort(self): cohort_request = self._create_flag_with_properties( - 
"cohort-flag", [{"key": "id", "type": "cohort", "value": 5151}], expected_status=status.HTTP_400_BAD_REQUEST + "cohort-flag", + [{"key": "id", "type": "cohort", "value": 5151}], + expected_status=status.HTTP_400_BAD_REQUEST, ) self.assertDictContainsSubset( @@ -2267,13 +2851,27 @@ def test_creating_feature_flag_with_non_existant_cohort(self): def test_validation_payloads(self): self._create_flag_with_properties( "person-flag", - [{"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"}], + [ + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } + ], payloads={"true": 300}, expected_status=status.HTTP_201_CREATED, ) self._create_flag_with_properties( "person-flag", - [{"key": "email", "type": "person", "value": "@posthog.com", "operator": "icontains"}], + [ + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } + ], payloads={"some-fake-key": 300}, expected_status=status.HTTP_400_BAD_REQUEST, ) @@ -2287,9 +2885,21 @@ def test_validation_payloads(self): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, "payloads": {"first-variant": {"some": "payload"}}, @@ -2308,12 +2918,27 @@ def test_validation_payloads(self): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": 
"second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, - "payloads": {"first-variant": {"some": "payload"}, "fourth-variant": {"some": "payload"}}, + "payloads": { + "first-variant": {"some": "payload"}, + "fourth-variant": {"some": "payload"}, + }, }, }, format="json", @@ -2329,9 +2954,21 @@ def test_validation_payloads(self): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "first-variant", "name": "First Variant", "rollout_percentage": 50}, - {"key": "second-variant", "name": "Second Variant", "rollout_percentage": 25}, - {"key": "third-variant", "name": "Third Variant", "rollout_percentage": 25}, + { + "key": "first-variant", + "name": "First Variant", + "rollout_percentage": 50, + }, + { + "key": "second-variant", + "name": "Second Variant", + "rollout_percentage": 25, + }, + { + "key": "third-variant", + "name": "Third Variant", + "rollout_percentage": 25, + }, ] }, "payloads": {"first-variant": {"some": "payload"}, "true": 2500}, @@ -2342,7 +2979,6 @@ def test_validation_payloads(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) def test_creating_feature_flag_with_behavioral_cohort(self): - cohort_valid_for_ff = Cohort.objects.create( team=self.team, groups=[{"properties": [{"key": "$some_prop", "value": "nomatchihope", "type": "person"}]}], @@ -2400,7 +3036,13 @@ def test_creating_feature_flag_with_behavioral_cohort(self): "groups": [ { "rollout_percentage": 65, - "properties": [{"key": "id", "type": "cohort", "value": cohort_not_valid_for_ff.id}], + "properties": [ + { + "key": "id", + "type": "cohort", + "value": 
cohort_not_valid_for_ff.id, + } + ], } ] }, @@ -2421,7 +3063,6 @@ def test_creating_feature_flag_with_behavioral_cohort(self): ) def test_creating_feature_flag_with_nested_behavioral_cohort(self): - cohort_not_valid_for_ff = Cohort.objects.create( team=self.team, filters={ @@ -2445,13 +3086,29 @@ def test_creating_feature_flag_with_nested_behavioral_cohort(self): nested_cohort_not_valid_for_ff = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "id", "value": cohort_not_valid_for_ff.pk, "type": "cohort"}]}], + groups=[ + { + "properties": [ + { + "key": "id", + "value": cohort_not_valid_for_ff.pk, + "type": "cohort", + } + ] + } + ], name="cohort-not-behavioural", ) cohort_request = self._create_flag_with_properties( "cohort-flag", - [{"key": "id", "type": "cohort", "value": nested_cohort_not_valid_for_ff.id}], + [ + { + "key": "id", + "type": "cohort", + "value": nested_cohort_not_valid_for_ff.id, + } + ], expected_status=status.HTTP_400_BAD_REQUEST, ) @@ -2484,14 +3141,28 @@ def test_creating_feature_flag_with_nested_behavioral_cohort(self): def test_validation_group_properties(self): groups_request = self._create_flag_with_properties( "groups-flag", - [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], aggregation_group_type_index=0, ) self.assertEqual(groups_request.status_code, status.HTTP_201_CREATED) illegal_groups_request = self._create_flag_with_properties( "illegal-groups-flag", - [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], aggregation_group_type_index=3, expected_status=status.HTTP_400_BAD_REQUEST, ) @@ -2507,7 +3178,14 @@ def test_validation_group_properties(self): person_request = self._create_flag_with_properties( "person-flag", - [{"key": "email", "type": 
"person", "value": "@posthog.com", "operator": "icontains"}], + [ + { + "key": "email", + "type": "person", + "value": "@posthog.com", + "operator": "icontains", + } + ], aggregation_group_type_index=0, expected_status=status.HTTP_400_BAD_REQUEST, ) @@ -2534,14 +3212,21 @@ def _create_flag_with_properties( create_response = self.client.post( f"/api/projects/{team_id}/feature_flags/", - data={"name": name, "key": name, "filters": {**kwargs, "groups": [{"properties": properties}]}}, + data={ + "name": name, + "key": name, + "filters": {**kwargs, "groups": [{"properties": properties}]}, + }, format="json", ) self.assertEqual(create_response.status_code, expected_status) return create_response def _get_feature_flag_activity( - self, flag_id: Optional[int] = None, team_id: Optional[int] = None, expected_status: int = status.HTTP_200_OK + self, + flag_id: Optional[int] = None, + team_id: Optional[int] = None, + expected_status: int = status.HTTP_200_OK, ): if team_id is None: team_id = self.team.id @@ -2568,7 +3253,10 @@ def test_patch_api_as_form_data(self): name="some feature", key="some-feature", created_by=self.user, - filters={"groups": [{"properties": [], "rollout_percentage": 100}], "multivariate": None}, + filters={ + "groups": [{"properties": [], "rollout_percentage": 100}], + "multivariate": None, + }, active=True, ) @@ -2583,7 +3271,11 @@ def test_patch_api_as_form_data(self): self.assertEqual(updated_flag.active, False) self.assertEqual(updated_flag.name, "replaced") self.assertEqual( - updated_flag.filters, {"groups": [{"properties": [], "rollout_percentage": 100}], "multivariate": None} + updated_flag.filters, + { + "groups": [{"properties": [], "rollout_percentage": 100}], + "multivariate": None, + }, ) def test_feature_flag_threshold(self): @@ -2592,7 +3284,10 @@ def test_feature_flag_threshold(self): data={ "name": "Beta feature", "key": "beta-feature", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 65}]}, + "filters": 
{ + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 65}], + }, "rollback_conditions": [ { "threshold": 5000, @@ -2620,7 +3315,10 @@ def test_feature_flag_threshold(self): self.assertEqual(len(feature_flag["rollback_conditions"]), 1) def test_feature_flag_can_edit(self): - self.assertEqual((AvailableFeature.ROLE_BASED_ACCESS in self.organization.available_features), False) + self.assertEqual( + (AvailableFeature.ROLE_BASED_ACCESS in self.organization.available_features), + False, + ) user_a = User.objects.create_and_join(self.organization, "a@potato.com", None) FeatureFlag.objects.create(team=self.team, created_by=user_a, key="blue_button") res = self.client.get(f"/api/projects/{self.team.id}/feature_flags/") @@ -2633,14 +3331,24 @@ def test_get_flags_dont_return_survey_targeting_flags(self): data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "targeting_flag_filters": { "groups": [ { "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -2665,7 +3373,10 @@ def test_flag_is_cached_on_create_and_update(self): data={ "name": "Beta feature", "key": "beta-feature", - "filters": {"aggregation_group_type_index": 0, "groups": [{"rollout_percentage": 65}]}, + "filters": { + "aggregation_group_type_index": 0, + "groups": [{"rollout_percentage": 65}], + }, }, format="json", ).json() @@ -2713,16 +3424,21 @@ def test_rate_limits_for_local_evaluation_are_independent(self, rate_limit_enabl for _ in range(5): response = self.client.get( - f"/api/projects/{self.team.pk}/feature_flags", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + 
f"/api/projects/{self.team.pk}/feature_flags", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) # Call to flags gets rate limited response = self.client.get( - f"/api/projects/{self.team.pk}/feature_flags", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/projects/{self.team.pk}/feature_flags", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) - self.assertEqual(len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), 1) + self.assertEqual( + len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), + 1, + ) incr_mock.assert_any_call( "rate_limit_exceeded", tags={ @@ -2738,14 +3454,22 @@ def test_rate_limits_for_local_evaluation_are_independent(self, rate_limit_enabl # but not call to local evaluation for _ in range(7): response = self.client.get( - f"/api/feature_flag/local_evaluation", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/feature_flag/local_evaluation", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), 0) + self.assertEqual( + len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), + 0, + ) def test_feature_flag_dashboard(self): another_feature_flag = FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="some feature", key="some-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="some feature", + key="some-feature", + created_by=self.user, ) dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) relationship = FeatureFlagDashboards.objects.create( @@ -2768,7 +3492,11 @@ def test_feature_flag_dashboard(self): def 
test_feature_flag_dashboard_patch(self): another_feature_flag = FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="some feature", key="some-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="some feature", + key="some-feature", + created_by=self.user, ) dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) response = self.client.patch( @@ -2787,7 +3515,11 @@ def test_feature_flag_dashboard_patch(self): def test_feature_flag_dashboard_already_exists(self): another_feature_flag = FeatureFlag.objects.create( - team=self.team, rollout_percentage=50, name="some feature", key="some-feature", created_by=self.user + team=self.team, + rollout_percentage=50, + name="some feature", + key="some-feature", + created_by=self.user, ) dashboard = Dashboard.objects.create(team=self.team, name="private dashboard", created_by=self.user) response = self.client.patch( @@ -2811,15 +3543,25 @@ def test_feature_flag_dashboard_already_exists(self): class TestBlastRadius(ClickhouseTestMixin, APIBaseTest): @snapshot_clickhouse_queries def test_user_blast_radius(self): - for i in range(10): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/user_blast_radius", { "condition": { - "properties": [{"key": "group", "type": "person", "value": [0, 1, 2, 3], "operator": "exact"}], + "properties": [ + { + "key": "group", + "type": "person", + "value": [0, 1, 2, 3], + "operator": "exact", + } + ], "rollout_percentage": 25, } }, @@ -2831,12 +3573,18 @@ def test_user_blast_radius(self): self.assertDictContainsSubset({"users_affected": 4, "total_users": 10}, response_json) def test_user_blast_radius_with_zero_users(self): - response = self.client.post( 
f"/api/projects/{self.team.id}/feature_flags/user_blast_radius", { "condition": { - "properties": [{"key": "group", "type": "person", "value": [0, 1, 2, 3], "operator": "exact"}], + "properties": [ + { + "key": "group", + "type": "person", + "value": [0, 1, 2, 3], + "operator": "exact", + } + ], "rollout_percentage": 25, } }, @@ -2848,15 +3596,25 @@ def test_user_blast_radius_with_zero_users(self): self.assertDictContainsSubset({"users_affected": 0, "total_users": 0}, response_json) def test_user_blast_radius_with_zero_selected_users(self): - for i in range(5): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/user_blast_radius", { "condition": { - "properties": [{"key": "group", "type": "person", "value": [8], "operator": "exact"}], + "properties": [ + { + "key": "group", + "type": "person", + "value": [8], + "operator": "exact", + } + ], "rollout_percentage": 25, } }, @@ -2868,9 +3626,12 @@ def test_user_blast_radius_with_zero_selected_users(self): self.assertDictContainsSubset({"users_affected": 0, "total_users": 5}, response_json) def test_user_blast_radius_with_all_selected_users(self): - for i in range(5): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) response = self.client.post( f"/api/projects/{self.team.id}/feature_flags/user_blast_radius", @@ -2884,9 +3645,12 @@ def test_user_blast_radius_with_all_selected_users(self): @snapshot_clickhouse_queries def test_user_blast_radius_with_single_cohort(self): - for i in range(10): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + 
distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -2943,9 +3707,12 @@ def test_user_blast_radius_with_single_cohort(self): @snapshot_clickhouse_queries def test_user_blast_radius_with_multiple_precalculated_cohorts(self): - for i in range(10): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -2975,7 +3742,11 @@ def test_user_blast_radius_with_multiple_precalculated_cohorts(self): { "type": "OR", "values": [ - {"key": "group", "value": [1, 2, 4, 5, 6], "type": "person"}, + { + "key": "group", + "value": [1, 2, 4, 5, 6], + "type": "person", + }, ], } ], @@ -3009,9 +3780,12 @@ def test_user_blast_radius_with_multiple_precalculated_cohorts(self): @snapshot_clickhouse_queries def test_user_blast_radius_with_multiple_static_cohorts(self): - for i in range(10): - _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"], properties={"group": f"{i}"}) + _create_person( + team_id=self.team.pk, + distinct_ids=[f"person{i}"], + properties={"group": f"{i}"}, + ) cohort1 = Cohort.objects.create(team=self.team, groups=[], is_static=True, last_calculation=timezone.now()) cohort1.insert_users_by_list(["person0", "person1", "person2"]) @@ -3025,7 +3799,11 @@ def test_user_blast_radius_with_multiple_static_cohorts(self): { "type": "OR", "values": [ - {"key": "group", "value": [1, 2, 4, 5, 6], "type": "person"}, + { + "key": "group", + "value": [1, 2, 4, 5, 6], + "type": "person", + }, ], } ], @@ -3081,7 +3859,10 @@ def test_user_blast_radius_with_groups(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": f"{i}"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": f"{i}"}, ) response 
= self.client.post( @@ -3113,7 +3894,10 @@ def test_user_blast_radius_with_groups_zero_selected(self): for i in range(5): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": f"{i}"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": f"{i}"}, ) response = self.client.post( @@ -3121,7 +3905,13 @@ def test_user_blast_radius_with_groups_zero_selected(self): { "condition": { "properties": [ - {"key": "industry", "type": "group", "value": [8], "operator": "exact", "group_type_index": 0} + { + "key": "industry", + "type": "group", + "value": [8], + "operator": "exact", + "group_type_index": 0, + } ], "rollout_percentage": 25, }, @@ -3140,7 +3930,10 @@ def test_user_blast_radius_with_groups_all_selected(self): for i in range(5): create_group( - team_id=self.team.pk, group_type_index=1, group_key=f"org:{i}", properties={"industry": f"{i}"} + team_id=self.team.pk, + group_type_index=1, + group_key=f"org:{i}", + properties={"industry": f"{i}"}, ) response = self.client.post( @@ -3166,7 +3959,10 @@ def test_user_blast_radius_with_groups_multiple_queries(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": f"{i}"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": f"{i}"}, ) response = self.client.post( @@ -3206,7 +4002,10 @@ def test_user_blast_radius_with_groups_incorrect_group_type(self): for i in range(10): create_group( - team_id=self.team.pk, group_type_index=0, group_key=f"org:{i}", properties={"industry": f"{i}"} + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:{i}", + properties={"industry": f"{i}"}, ) response = self.client.post( @@ -3261,7 +4060,10 @@ def slow_query(execute, sql, *args, **kwargs): return execute(f"SELECT pg_sleep(1); {sql}", *args, **kwargs) 
-@patch("posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", return_value=True) +@patch( + "posthog.models.feature_flag.flag_matching.postgres_healthcheck.is_connected", + return_value=True, +) class TestResiliency(TransactionTestCase, QueryMatchingTest): def setUp(self) -> None: return super().setUp() @@ -3278,7 +4080,12 @@ def test_feature_flags_v3_with_group_properties(self, *args): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) - create_group(team_id=self.team.pk, group_type_index=0, group_key=f"org:1", properties={"industry": f"finance"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:1", + properties={"industry": f"finance"}, + ) serialized_data = FeatureFlagSerializer( data={ @@ -3289,7 +4096,12 @@ def test_feature_flags_v3_with_group_properties(self, *args): "groups": [ { "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0} + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } ], "rollout_percentage": None, } @@ -3326,7 +4138,6 @@ def test_feature_flags_v3_with_group_properties(self, *args): # now db is down with snapshot_postgres_queries_context(self), connection.execute_wrapper(QueryTimeoutWrapper()): - with self.assertNumQueries(1): all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id", groups={"organization": "org:1"}) @@ -3372,7 +4183,11 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) serialized_data = FeatureFlagSerializer( data={ @@ -3382,7 +4197,12 @@ def 
test_feature_flags_v3_with_person_properties(self, mock_counter, *args): "groups": [ { "properties": [ - {"key": "email", "value": "tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": None, } @@ -3426,7 +4246,9 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): # # now db is down, but decide was sent email parameter with correct email with self.assertNumQueries(0): all_flags, _, _, errors = get_all_feature_flags( - team_id, "random", property_value_overrides={"email": "tim@posthog.com"} + team_id, + "random", + property_value_overrides={"email": "tim@posthog.com"}, ) self.assertTrue(all_flags["property-flag"]) self.assertTrue(all_flags["default-flag"]) @@ -3439,7 +4261,9 @@ def test_feature_flags_v3_with_person_properties(self, mock_counter, *args): # # now db is down, but decide was sent email parameter with different email with self.assertNumQueries(0): all_flags, _, _, errors = get_all_feature_flags( - team_id, "example_id", property_value_overrides={"email": "tom@posthog.com"} + team_id, + "example_id", + property_value_overrides={"email": "tom@posthog.com"}, ) self.assertFalse(all_flags["property-flag"]) self.assertTrue(all_flags["default-flag"]) @@ -3457,7 +4281,11 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): create_request = rf.post(f"api/projects/{self.team.pk}/feature_flags/", {"name": "xyz"}) create_request.user = self.user - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) serialized_data = FeatureFlagSerializer( data={ @@ -3467,7 +4295,12 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): "groups": [ { "properties": [ - {"key": "email", "value": 
"tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": None, } @@ -3502,7 +4335,8 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): # now db is slow and times out with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", 500 + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, ): mock_postgres_check.return_value = False all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id") @@ -3514,7 +4348,9 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): # # now db is down, but decide was sent email parameter with correct email with self.assertNumQueries(0): all_flags, _, _, errors = get_all_feature_flags( - team_id, "random", property_value_overrides={"email": "tim@posthog.com"} + team_id, + "random", + property_value_overrides={"email": "tim@posthog.com"}, ) self.assertTrue(all_flags["property-flag"]) self.assertTrue(all_flags["default-flag"]) @@ -3523,7 +4359,9 @@ def test_feature_flags_v3_with_a_working_slow_db(self, mock_postgres_check): # # now db is down, but decide was sent email parameter with different email with self.assertNumQueries(0): all_flags, _, _, errors = get_all_feature_flags( - team_id, "example_id", property_value_overrides={"email": "tom@posthog.com"} + team_id, + "example_id", + property_value_overrides={"email": "tom@posthog.com"}, ) self.assertFalse(all_flags["property-flag"]) self.assertTrue(all_flags["default-flag"]) @@ -3537,7 +4375,11 @@ def test_feature_flags_v3_with_slow_db_doesnt_try_to_compute_conditions_again(se team_id = self.team.pk - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com"}) + Person.objects.create( + team=self.team, + 
distinct_ids=["example_id"], + properties={"email": "tim@posthog.com"}, + ) FeatureFlag.objects.create( name="Alpha feature", @@ -3546,7 +4388,12 @@ def test_feature_flags_v3_with_slow_db_doesnt_try_to_compute_conditions_again(se "groups": [ { "properties": [ - {"key": "email", "value": "tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": None, } @@ -3563,7 +4410,12 @@ def test_feature_flags_v3_with_slow_db_doesnt_try_to_compute_conditions_again(se "groups": [ { "properties": [ - {"key": "email", "value": "tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": None, } @@ -3593,7 +4445,8 @@ def test_feature_flags_v3_with_slow_db_doesnt_try_to_compute_conditions_again(se # now db is slow and times out with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", 500 + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, ), self.assertNumQueries(2): # no extra queries to get person properties for the second flag after first one failed all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id") @@ -3625,7 +4478,12 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) - create_group(team_id=self.team.pk, group_type_index=0, group_key=f"org:1", properties={"industry": f"finance"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key=f"org:1", + properties={"industry": f"finance"}, + ) serialized_data = FeatureFlagSerializer( data={ @@ -3636,7 +4494,12 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, 
mock_counter, "groups": [ { "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0} + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } ], "rollout_percentage": None, } @@ -3673,9 +4536,9 @@ def test_feature_flags_v3_with_group_properties_and_slow_db(self, mock_counter, # now db is slow with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", 500 + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, ): - with self.assertNumQueries(2): all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id", groups={"organization": "org:1"}) @@ -3731,7 +4594,9 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ create_request.user = self.user Person.objects.create( - team=self.team, distinct_ids=["example_id", "random"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id", "random"], + properties={"email": "tim@posthog.com"}, ) serialized_data = FeatureFlagSerializer( @@ -3742,7 +4607,12 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ "groups": [ { "properties": [ - {"key": "email", "value": "tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": 91, } @@ -3776,7 +4646,8 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ # db is slow and times out with snapshot_postgres_queries_context(self), connection.execute_wrapper(slow_query), patch( - "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", 500 + "posthog.models.feature_flag.flag_matching.FLAG_MATCHING_QUERY_TIMEOUT_MS", + 500, ): all_flags, _, _, errors = get_all_feature_flags(team_id, "example_id", 
hash_key_override="random") @@ -3788,7 +4659,9 @@ def test_feature_flags_v3_with_experience_continuity_working_slow_db(self, mock_ # still need to get hash key override from db, so should time out with self.assertNumQueries(2): all_flags, _, _, errors = get_all_feature_flags( - team_id, "random", property_value_overrides={"email": "tim@posthog.com"} + team_id, + "random", + property_value_overrides={"email": "tim@posthog.com"}, ) self.assertTrue("property-flag" not in all_flags) self.assertTrue(all_flags["default-flag"]) @@ -3813,7 +4686,9 @@ def test_feature_flags_v3_with_experience_continuity_and_incident_mode(self, moc create_request.user = self.user Person.objects.create( - team=self.team, distinct_ids=["example_id", "random"], properties={"email": "tim@posthog.com"} + team=self.team, + distinct_ids=["example_id", "random"], + properties={"email": "tim@posthog.com"}, ) serialized_data = FeatureFlagSerializer( @@ -3824,7 +4699,12 @@ def test_feature_flags_v3_with_experience_continuity_and_incident_mode(self, moc "groups": [ { "properties": [ - {"key": "email", "value": "tim@posthog.com", "type": "person", "operator": "exact"} + { + "key": "email", + "value": "tim@posthog.com", + "type": "person", + "operator": "exact", + } ], "rollout_percentage": 91, } diff --git a/posthog/api/test/test_geoip.py b/posthog/api/test/test_geoip.py index ecc6bb237989f..dae4ec7064ffc 100644 --- a/posthog/api/test/test_geoip.py +++ b/posthog/api/test/test_geoip.py @@ -14,7 +14,12 @@ @pytest.mark.parametrize( - "test_input,expected", [(australia_ip, "Australia"), (uk_ip, "United Kingdom"), (us_ip_v6, "United States")] + "test_input,expected", + [ + (australia_ip, "Australia"), + (uk_ip, "United Kingdom"), + (us_ip_v6, "United States"), + ], ) def test_geoip_results(test_input, expected): properties = get_geoip_properties(test_input) diff --git a/posthog/api/test/test_ingestion_warnings.py b/posthog/api/test/test_ingestion_warnings.py index b6843f376766c..bdf3996955909 100644 --- 
a/posthog/api/test/test_ingestion_warnings.py +++ b/posthog/api/test/test_ingestion_warnings.py @@ -99,7 +99,13 @@ def test_ingestion_warnings_api(self): { "type": "another_type", "lastSeen": "2021-11-15T00:00:00Z", - "warnings": [{"type": "another_type", "timestamp": "2021-11-15T00:00:00Z", "details": {}}], + "warnings": [ + { + "type": "another_type", + "timestamp": "2021-11-15T00:00:00Z", + "details": {}, + } + ], "count": 1, }, ] diff --git a/posthog/api/test/test_insight.py b/posthog/api/test/test_insight.py index 8becdf0ac7e60..c2b50abcaa468 100644 --- a/posthog/api/test/test_insight.py +++ b/posthog/api/test/test_insight.py @@ -358,7 +358,13 @@ def test_listing_insights_does_not_nplus1(self) -> None: # adding more insights doesn't change the query count self.assertEqual( - [FuzzyInt(10, 11), FuzzyInt(10, 11), FuzzyInt(10, 11), FuzzyInt(10, 11), FuzzyInt(10, 11)], + [ + FuzzyInt(10, 11), + FuzzyInt(10, 11), + FuzzyInt(10, 11), + FuzzyInt(10, 11), + FuzzyInt(10, 11), + ], query_counts, f"received query counts\n\n{query_counts}", ) @@ -584,7 +590,9 @@ def test_can_update_insight_with_inconsistent_dashboards(self) -> None: # accidentally include a deleted dashboard _, update_response = self.dashboard_api.update_insight( - insight_id, {"dashboards": [deleted_dashboard_id]}, expected_status=status.HTTP_400_BAD_REQUEST + insight_id, + {"dashboards": [deleted_dashboard_id]}, + expected_status=status.HTTP_400_BAD_REQUEST, ) # confirm no updates happened @@ -1408,7 +1416,12 @@ def test_insight_paths_basic(self) -> None: f"/api/projects/{self.team.id}/insights/path", data={ "properties": json.dumps( - [{"key": "properties.test == 'val' and person.properties.$os == 'Mac'", "type": "hogql"}] + [ + { + "key": "properties.test == 'val' and person.properties.$os == 'Mac'", + "type": "hogql", + } + ] ) }, ).json() @@ -1418,7 +1431,12 @@ def test_insight_paths_basic(self) -> None: f"/api/projects/{self.team.id}/insights/path", data={ "properties": json.dumps( - [{"key": 
"properties.test == 'val' and person.properties.$os == 'Windows'", "type": "hogql"}] + [ + { + "key": "properties.test == 'val' and person.properties.$os == 'Windows'", + "type": "hogql", + } + ] ) }, ).json() @@ -1549,24 +1567,40 @@ def test_logged_out_user_can_retrieve_insight_with_correct_insight_sharing_acces f"/api/projects/{self.team.id}/insights/?sharing_access_token={sharing_configuration.access_token}", ) - self.assertEqual(response_invalid_token_retrieve.status_code, 403, response_invalid_token_retrieve.json()) + self.assertEqual( + response_invalid_token_retrieve.status_code, + 403, + response_invalid_token_retrieve.json(), + ) self.assertEqual( response_invalid_token_retrieve.json(), self.unauthenticated_response("Sharing access token is invalid.", "authentication_failed"), ) - self.assertEqual(response_incorrect_token_retrieve.status_code, 404, response_incorrect_token_retrieve.json()) + self.assertEqual( + response_incorrect_token_retrieve.status_code, + 404, + response_incorrect_token_retrieve.json(), + ) self.assertEqual( response_incorrect_token_retrieve.json(), self.not_found_response(), ) - self.assertEqual(response_correct_token_retrieve.status_code, 200, response_correct_token_retrieve.json()) + self.assertEqual( + response_correct_token_retrieve.status_code, + 200, + response_correct_token_retrieve.json(), + ) self.assertDictContainsSubset( { "name": "Foobar", }, response_correct_token_retrieve.json(), ) - self.assertEqual(response_correct_token_list.status_code, 200, response_correct_token_list.json()) + self.assertEqual( + response_correct_token_list.status_code, + 200, + response_correct_token_list.json(), + ) # abcdfghi not returned as it's not related to this sharing configuration self.assertEqual(response_correct_token_list.json()["count"], 1) self.assertDictContainsSubset( @@ -1622,7 +1656,8 @@ def test_logged_out_user_cannot_update_insight_with_correct_insight_sharing_acce self.assertEqual( response_retrieve.json(), 
self.unauthenticated_response( - "Sharing access token can only be used for GET requests.", "authentication_failed" + "Sharing access token can only be used for GET requests.", + "authentication_failed", ), ) @@ -1634,7 +1669,10 @@ def test_logged_out_user_cannot_retrieve_insight_with_disabled_insight_sharing_a short_id="12345678", ) sharing_configuration = SharingConfiguration.objects.create( - team=self.team, insight=insight, enabled=False, access_token="xyz" # DISABLED! + team=self.team, + insight=insight, + enabled=False, + access_token="xyz", # DISABLED! ) response_retrieve = self.client.get( @@ -1708,16 +1746,28 @@ def test_logged_out_user_can_retrieve_insight_with_correct_dashboard_sharing_acc f"/api/projects/{self.team.id}/insights/?sharing_access_token={sharing_configuration.access_token}", ) - self.assertEqual(response_incorrect_token_retrieve.status_code, 403, response_incorrect_token_retrieve.json()) + self.assertEqual( + response_incorrect_token_retrieve.status_code, + 403, + response_incorrect_token_retrieve.json(), + ) self.assertEqual( response_incorrect_token_retrieve.json(), self.unauthenticated_response("Sharing access token is invalid.", "authentication_failed"), ) - self.assertEqual(response_correct_token_retrieve.status_code, 200, response_correct_token_retrieve.json()) + self.assertEqual( + response_correct_token_retrieve.status_code, + 200, + response_correct_token_retrieve.json(), + ) self.assertDictContainsSubset({"name": "Foobar"}, response_correct_token_retrieve.json()) # Below checks that the deleted insight and non-deleted insight whose tile is deleted are not be retrievable # Also, the text tile should not affect things - self.assertEqual(response_correct_token_list.status_code, 200, response_correct_token_list.json()) + self.assertEqual( + response_correct_token_list.status_code, + 200, + response_correct_token_list.json(), + ) self.assertEqual(response_correct_token_list.json()["count"], 1) def 
test_logged_out_user_cannot_retrieve_insight_with_correct_deleted_dashboard_sharing_access_token(self) -> None: @@ -1738,7 +1788,11 @@ def test_logged_out_user_cannot_retrieve_insight_with_correct_deleted_dashboard_ f"/api/projects/{self.team.id}/insights/?sharing_access_token={sharing_configuration.access_token}", ) - self.assertEqual(response_correct_token_list.status_code, 200, response_correct_token_list.json()) + self.assertEqual( + response_correct_token_list.status_code, + 200, + response_correct_token_list.json(), + ) self.assertEqual(response_correct_token_list.json()["count"], 0) def test_insight_trends_csv(self) -> None: @@ -1928,7 +1982,14 @@ def test_get_recently_viewed_insights_excludes_query_based_insights_by_default(s "properties.$lib", "timestamp", ], - "properties": [{"type": "event", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "event", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], "limit": 100, }, }, @@ -1962,7 +2023,14 @@ def test_get_recently_viewed_insights_can_include_query_based_insights(self) -> "properties.$lib", "timestamp", ], - "properties": [{"type": "event", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "event", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], "limit": 100, }, }, @@ -2157,7 +2225,10 @@ def test_soft_delete_can_be_reversed_by_patch(self) -> None: self.client.patch( f"/api/projects/{self.team.id}/insights/{insight_id}", - {"deleted": True, "name": "an insight"}, # This request should work also if other fields are provided + { + "deleted": True, + "name": "an insight", + }, # This request should work also if other fields are provided ) self.assertEqual( @@ -2167,7 +2238,10 @@ def test_soft_delete_can_be_reversed_by_patch(self) -> None: update_response = self.client.patch( f"/api/projects/{self.team.id}/insights/{insight_id}", - {"deleted": False, "name": "an insight"}, # 
This request should work also if other fields are provided + { + "deleted": False, + "name": "an insight", + }, # This request should work also if other fields are provided ) self.assertEqual(update_response.status_code, status.HTTP_200_OK) @@ -2201,7 +2275,8 @@ def test_soft_delete_cannot_be_reversed_for_another_team(self) -> None: ) other_update_response = self.client.patch( - f"/api/projects/{self.team.id}/insights/{other_insight.id}", {"deleted": False} + f"/api/projects/{self.team.id}/insights/{other_insight.id}", + {"deleted": False}, ) self.assertEqual(other_update_response.status_code, status.HTTP_404_NOT_FOUND) @@ -2244,7 +2319,12 @@ def test_insight_trend_hogql_global_filters(self) -> None: _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) with freeze_time("2012-01-14T03:21:34.000Z"): for i in range(25): - _create_event(team=self.team, event="$pageview", distinct_id="1", properties={"int_value": i}) + _create_event( + team=self.team, + event="$pageview", + distinct_id="1", + properties={"int_value": i}, + ) with freeze_time("2012-01-15T04:01:34.000Z"): # 25 events total response = self.client.get( @@ -2261,8 +2341,14 @@ def test_insight_trend_hogql_global_filters(self) -> None: "events": json.dumps([{"id": "$pageview"}]), "properties": json.dumps( [ - {"key": "toInt(properties.int_value) > 10 and 'bla' != 'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.fish, '%fish%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) > 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.fish, '%fish%')", + "type": "hogql", + }, ] ), }, @@ -2283,7 +2369,11 @@ def test_insight_trend_hogql_global_filters(self) -> None: ), }, ) - self.assertEqual(response_placeholder.status_code, status.HTTP_400_BAD_REQUEST, response_placeholder.json()) + self.assertEqual( + response_placeholder.status_code, + status.HTTP_400_BAD_REQUEST, + response_placeholder.json(), + ) self.assertEqual( 
response_placeholder.json(), self.validation_error_response("Placeholders, such as {team_id}, are not supported in this context"), @@ -2295,7 +2385,12 @@ def test_insight_trend_hogql_local_filters(self) -> None: _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) with freeze_time("2012-01-14T03:21:34.000Z"): for i in range(25): - _create_event(team=self.team, event="$pageview", distinct_id="1", properties={"int_value": i}) + _create_event( + team=self.team, + event="$pageview", + distinct_id="1", + properties={"int_value": i}, + ) with freeze_time("2012-01-15T04:01:34.000Z"): # test trends local property filter response = self.client.get( @@ -2311,7 +2406,10 @@ def test_insight_trend_hogql_local_filters(self) -> None: "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql", }, - {"key": "like(person.properties.fish, '%fish%')", "type": "hogql"}, + { + "key": "like(person.properties.fish, '%fish%')", + "type": "hogql", + }, ] ), } @@ -2328,7 +2426,12 @@ def test_insight_trend_hogql_breakdown(self) -> None: _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) with freeze_time("2012-01-14T03:21:34.000Z"): for i in range(25): - _create_event(team=self.team, event="$pageview", distinct_id="1", properties={"int_value": i}) + _create_event( + team=self.team, + event="$pageview", + distinct_id="1", + properties={"int_value": i}, + ) with freeze_time("2012-01-15T04:01:34.000Z"): # test trends breakdown response = self.client.get( @@ -2349,9 +2452,23 @@ def test_insight_trend_hogql_breakdown(self) -> None: @also_test_with_materialized_columns(event_properties=["int_value"], person_properties=["fish"]) def test_insight_funnels_hogql_global_filters(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): - _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) - _create_event(team=self.team, event="user signed up", distinct_id="1", 
properties={"int_value": 1}) - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"int_value": 20}) + _create_person( + team=self.team, + distinct_ids=["1"], + properties={"fish": "there is no fish"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"int_value": 1}, + ) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"int_value": 20}, + ) response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", { @@ -2361,8 +2478,14 @@ def test_insight_funnels_hogql_global_filters(self) -> None: ], "properties": json.dumps( [ - {"key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.fish, '%fish%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.fish, '%fish%')", + "type": "hogql", + }, ] ), "funnel_window_days": 14, @@ -2381,9 +2504,23 @@ def test_insight_funnels_hogql_global_filters(self) -> None: @also_test_with_materialized_columns(event_properties=["int_value"], person_properties=["fish"]) def test_insight_funnels_hogql_local_filters(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): - _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"int_value": 1}) - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"int_value": 20}) + _create_person( + team=self.team, + distinct_ids=["1"], + properties={"fish": "there is no fish"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"int_value": 1}, + ) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"int_value": 20}, + ) response = self.client.post( 
f"/api/projects/{self.team.id}/insights/funnel/", { @@ -2394,8 +2531,14 @@ def test_insight_funnels_hogql_local_filters(self) -> None: "order": 0, "properties": json.dumps( [ - {"key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.fish, '%fish%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.fish, '%fish%')", + "type": "hogql", + }, ] ), }, @@ -2405,8 +2548,14 @@ def test_insight_funnels_hogql_local_filters(self) -> None: "order": 1, "properties": json.dumps( [ - {"key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.fish, '%fish%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.fish, '%fish%')", + "type": "hogql", + }, ] ), }, @@ -2427,9 +2576,23 @@ def test_insight_funnels_hogql_local_filters(self) -> None: @also_test_with_materialized_columns(event_properties=["int_value"], person_properties=["fish"]) def test_insight_funnels_hogql_breakdown(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): - _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"int_value": 1}) - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"int_value": 20}) + _create_person( + team=self.team, + distinct_ids=["1"], + properties={"fish": "there is no fish"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"int_value": 1}, + ) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"int_value": 20}, + ) response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", { @@ -2441,7 +2604,10 @@ def 
test_insight_funnels_hogql_breakdown(self) -> None: ], "properties": json.dumps( [ - {"key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, ] ), "funnel_window_days": 14, @@ -2465,9 +2631,23 @@ def test_insight_funnels_hogql_breakdown(self) -> None: @also_test_with_materialized_columns(event_properties=["int_value"], person_properties=["fish"]) def test_insight_funnels_hogql_breakdown_single(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): - _create_person(team=self.team, distinct_ids=["1"], properties={"fish": "there is no fish"}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"int_value": 1}) - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"int_value": 20}) + _create_person( + team=self.team, + distinct_ids=["1"], + properties={"fish": "there is no fish"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"int_value": 1}, + ) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"int_value": 20}, + ) response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", { @@ -2479,7 +2659,10 @@ def test_insight_funnels_hogql_breakdown_single(self) -> None: ], "properties": json.dumps( [ - {"key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": "toInt(properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, ] ), "funnel_window_days": 14, @@ -2502,21 +2685,49 @@ def test_insight_funnels_hogql_breakdown_single(self) -> None: def test_insight_funnels_hogql_aggregating_steps(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): _create_person(team=self.team, distinct_ids=["1"], properties={"int_value": 1}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"$browser": 
"Chrome"}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"$browser": "Firefox"}) - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"$browser": "Chrome"}) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Firefox"}, + ) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", { "insight": "FUNNELS", "entity_type": "events", "events": [ - {"id": "user signed up", "type": "events", "order": 0, "math": "total"}, - {"id": "user did things", "type": "events", "order": 1, "math": "total"}, + { + "id": "user signed up", + "type": "events", + "order": 0, + "math": "total", + }, + { + "id": "user did things", + "type": "events", + "order": 1, + "math": "total", + }, ], "properties": json.dumps( [ - {"key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, ] ), "funnel_aggregate_by_hogql": "properties.$browser", @@ -2535,11 +2746,26 @@ def test_insight_funnels_hogql_aggregating_steps(self) -> None: def test_insight_funnels_hogql_aggregating_time_to_convert(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): _create_person(team=self.team, distinct_ids=["1"], properties={"int_value": 1}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"$browser": "Chrome"}) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) with freeze_time("2012-01-15T04:01:36.500Z"): - _create_event(team=self.team, event="user signed up", distinct_id="1", 
properties={"$browser": "Firefox"}) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Firefox"}, + ) with freeze_time("2012-01-15T04:01:38.200Z"): - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"$browser": "Chrome"}) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) with freeze_time("2012-01-16T04:01:38.200Z"): response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", @@ -2547,12 +2773,25 @@ def test_insight_funnels_hogql_aggregating_time_to_convert(self) -> None: "insight": "FUNNELS", "entity_type": "events", "events": [ - {"id": "user signed up", "type": "events", "order": 0, "math": "total"}, - {"id": "user did things", "type": "events", "order": 1, "math": "total"}, + { + "id": "user signed up", + "type": "events", + "order": 0, + "math": "total", + }, + { + "id": "user did things", + "type": "events", + "order": 1, + "math": "total", + }, ], "properties": json.dumps( [ - {"key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, ] ), "funnel_aggregate_by_hogql": "properties.$browser", @@ -2570,11 +2809,26 @@ def test_insight_funnels_hogql_aggregating_time_to_convert(self) -> None: def test_insight_funnels_hogql_aggregating_trends(self) -> None: with freeze_time("2012-01-15T04:01:34.000Z"): _create_person(team=self.team, distinct_ids=["1"], properties={"int_value": 1}) - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"$browser": "Chrome"}) + _create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) with freeze_time("2012-01-15T04:01:36.500Z"): - _create_event(team=self.team, event="user signed up", distinct_id="1", properties={"$browser": "Firefox"}) + 
_create_event( + team=self.team, + event="user signed up", + distinct_id="1", + properties={"$browser": "Firefox"}, + ) with freeze_time("2012-01-15T04:01:38.200Z"): - _create_event(team=self.team, event="user did things", distinct_id="1", properties={"$browser": "Chrome"}) + _create_event( + team=self.team, + event="user did things", + distinct_id="1", + properties={"$browser": "Chrome"}, + ) with freeze_time("2012-01-16T04:01:38.200Z"): response = self.client.post( f"/api/projects/{self.team.id}/insights/funnel/", @@ -2587,7 +2841,10 @@ def test_insight_funnels_hogql_aggregating_trends(self) -> None: ], "properties": json.dumps( [ - {"key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", "type": "hogql"}, + { + "key": "toInt(person.properties.int_value) < 10 and 'bla' != 'a%sd'", + "type": "hogql", + }, ] ), "funnel_aggregate_by_hogql": "properties.$browser", @@ -2597,7 +2854,10 @@ def test_insight_funnels_hogql_aggregating_trends(self) -> None: self.assertEqual(response.status_code, status.HTTP_200_OK) response_json = response.json() self.assertEqual(len(response_json["result"]), 1) - self.assertEqual(response_json["result"][0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 50.0, 0.0]) + self.assertEqual( + response_json["result"][0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 50.0, 0.0], + ) self.assertEqual( response_json["result"][0]["days"], [ @@ -2658,8 +2918,14 @@ def test_insight_retention_hogql(self) -> None: data={ "properties": json.dumps( [ - {"key": "toInt(properties.int_value) > 100 and 'bla' != 'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.email, '%test.com%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) > 100 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.email, '%test.com%')", + "type": "hogql", + }, ] ), }, @@ -2672,8 +2938,14 @@ def test_insight_retention_hogql(self) -> None: data={ "properties": json.dumps( [ - {"key": "toInt(properties.int_value) > 0 and 'bla' != 
'a%sd'", "type": "hogql"}, - {"key": "like(person.properties.email, '%test.com%')", "type": "hogql"}, + { + "key": "toInt(properties.int_value) > 0 and 'bla' != 'a%sd'", + "type": "hogql", + }, + { + "key": "like(person.properties.email, '%test.com%')", + "type": "hogql", + }, ] ), }, diff --git a/posthog/api/test/test_insight_funnels.py b/posthog/api/test/test_insight_funnels.py index 17f551e77194d..cdbcaf573b7ec 100644 --- a/posthog/api/test/test_insight_funnels.py +++ b/posthog/api/test/test_insight_funnels.py @@ -4,14 +4,21 @@ from django.test.client import Client from rest_framework import status -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for class ClickhouseTestFunnelTypes(ClickhouseTestMixin, APIBaseTest): def test_funnel_unordered_basic_post(self): journeys_for( - {"1": [{"event": "step one"}, {"event": "step two"}], "2": [{"event": "step one"}, {"event": "step two"}]}, + { + "1": [{"event": "step one"}, {"event": "step two"}], + "2": [{"event": "step one"}, {"event": "step two"}], + }, self.team, ) @@ -48,16 +55,46 @@ def test_unordered_funnel_with_breakdown_by_event_property(self): events = { "person1": [ - {"event": "sign up", "timestamp": "2020-01-01", "properties": person1_properties}, - {"event": "buy", "timestamp": "2020-01-02", "properties": person1_properties}, - {"event": "play movie", "timestamp": "2020-01-03", "properties": person1_properties}, + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": person1_properties, + }, + { + "event": "buy", + "timestamp": "2020-01-02", + "properties": person1_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-03", + "properties": person1_properties, + }, ], "person2": [ - {"event": "buy", "timestamp": "2020-01-01", "properties": person2_properties}, - {"event": "sign up", 
"timestamp": "2020-01-02", "properties": person2_properties}, - {"event": "play movie", "timestamp": "2020-01-03", "properties": person2_properties}, + { + "event": "buy", + "timestamp": "2020-01-01", + "properties": person2_properties, + }, + { + "event": "sign up", + "timestamp": "2020-01-02", + "properties": person2_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-03", + "properties": person2_properties, + }, + ], + "person3": [ + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": person3_properties, + } ], - "person3": [{"event": "sign up", "timestamp": "2020-01-01", "properties": person3_properties}], } journeys_for(team=self.team, events_by_person=events) @@ -65,7 +102,11 @@ def test_unordered_funnel_with_breakdown_by_event_property(self): response = self.client.post( f"/api/projects/{self.team.pk}/insights/funnel/", { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": "FUNNELS", "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -80,17 +121,41 @@ def test_unordered_funnel_with_breakdown_by_event_property(self): { "breakdown_value": "Chrome", "steps": [ - {"name": "Completed 1 step", "converted": ["person1"], "dropped": []}, - {"name": "Completed 2 steps", "converted": ["person1"], "dropped": []}, - {"name": "Completed 3 steps", "converted": ["person1"], "dropped": []}, + { + "name": "Completed 1 step", + "converted": ["person1"], + "dropped": [], + }, + { + "name": "Completed 2 steps", + "converted": ["person1"], + "dropped": [], + }, + { + "name": "Completed 3 steps", + "converted": ["person1"], + "dropped": [], + }, ], }, { "breakdown_value": "Safari", "steps": [ - {"name": "Completed 1 step", "converted": ["person2", "person3"], "dropped": []}, - {"name": "Completed 2 steps", "converted": ["person2"], "dropped": ["person3"]}, - 
{"name": "Completed 3 steps", "converted": ["person2"], "dropped": []}, + { + "name": "Completed 1 step", + "converted": ["person2", "person3"], + "dropped": [], + }, + { + "name": "Completed 2 steps", + "converted": ["person2"], + "dropped": ["person3"], + }, + { + "name": "Completed 3 steps", + "converted": ["person2"], + "dropped": [], + }, ], }, ] @@ -138,13 +203,33 @@ def test_strict_funnel_with_breakdown_by_event_property(self): events = { "person1": [ - {"event": "sign up", "timestamp": "2020-01-01", "properties": chrome_properties}, - {"event": "play movie", "timestamp": "2020-01-02", "properties": chrome_properties}, - {"event": "buy", "timestamp": "2020-01-03", "properties": chrome_properties}, + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": chrome_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-02", + "properties": chrome_properties, + }, + { + "event": "buy", + "timestamp": "2020-01-03", + "properties": chrome_properties, + }, ], "person2": [ - {"event": "sign up", "timestamp": "2020-01-01", "properties": safari_properties}, - {"event": "play movie", "timestamp": "2020-01-02", "properties": safari_properties}, + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": safari_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-02", + "properties": safari_properties, + }, { # This person should not convert here as we're in strict mode, # and this event is not in the funnel definition @@ -152,9 +237,19 @@ def test_strict_funnel_with_breakdown_by_event_property(self): "timestamp": "2020-01-03", "properties": safari_properties, }, - {"event": "buy", "timestamp": "2020-01-04", "properties": safari_properties}, + { + "event": "buy", + "timestamp": "2020-01-04", + "properties": safari_properties, + }, + ], + "person3": [ + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": safari_properties, + } ], - "person3": [{"event": "sign up", "timestamp": "2020-01-01", 
"properties": safari_properties}], } journeys_for(team=self.team, events_by_person=events) @@ -162,7 +257,11 @@ def test_strict_funnel_with_breakdown_by_event_property(self): response = self.client.post( f"/api/projects/{self.team.pk}/insights/funnel/", { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": "FUNNELS", "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -185,8 +284,16 @@ def test_strict_funnel_with_breakdown_by_event_property(self): { "breakdown_value": "Safari", "steps": [ - {"name": "sign up", "converted": ["person2", "person3"], "dropped": []}, - {"name": "play movie", "converted": ["person2"], "dropped": ["person3"]}, + { + "name": "sign up", + "converted": ["person2", "person3"], + "dropped": [], + }, + { + "name": "play movie", + "converted": ["person2"], + "dropped": ["person3"], + }, {"name": "buy", "converted": [], "dropped": ["person2"]}, ], }, @@ -203,16 +310,46 @@ def test_funnel_with_breakdown_by_event_property(self): events = { "person1": [ - {"event": "sign up", "timestamp": "2020-01-01", "properties": person1_properties}, - {"event": "play movie", "timestamp": "2020-01-02", "properties": person1_properties}, - {"event": "buy", "timestamp": "2020-01-03", "properties": person1_properties}, + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": person1_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-02", + "properties": person1_properties, + }, + { + "event": "buy", + "timestamp": "2020-01-03", + "properties": person1_properties, + }, ], "person2": [ - {"event": "sign up", "timestamp": "2020-01-01", "properties": person2_properties}, - {"event": "play movie", "timestamp": "2020-01-02", "properties": person2_properties}, - {"event": "buy", "timestamp": "2020-01-03", "properties": person2_properties}, + { + "event": 
"sign up", + "timestamp": "2020-01-01", + "properties": person2_properties, + }, + { + "event": "play movie", + "timestamp": "2020-01-02", + "properties": person2_properties, + }, + { + "event": "buy", + "timestamp": "2020-01-03", + "properties": person2_properties, + }, + ], + "person3": [ + { + "event": "sign up", + "timestamp": "2020-01-01", + "properties": person3_properties, + } ], - "person3": [{"event": "sign up", "timestamp": "2020-01-01", "properties": person3_properties}], } journeys_for(team=self.team, events_by_person=events) @@ -220,7 +357,11 @@ def test_funnel_with_breakdown_by_event_property(self): response = self.client.post( f"/api/projects/{self.team.pk}/insights/funnel/", { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": "FUNNELS", "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -242,8 +383,16 @@ def test_funnel_with_breakdown_by_event_property(self): { "breakdown_value": "Safari", "steps": [ - {"name": "sign up", "converted": ["person2", "person3"], "dropped": []}, - {"name": "play movie", "converted": ["person2"], "dropped": ["person3"]}, + { + "name": "sign up", + "converted": ["person2", "person3"], + "dropped": [], + }, + { + "name": "play movie", + "converted": ["person2"], + "dropped": ["person3"], + }, {"name": "buy", "converted": ["person2"], "dropped": []}, ], }, @@ -577,16 +726,24 @@ def test_funnel_time_to_convert_auto_bins_unordered(self): def test_funnel_invalid_action_handled(self): response = self.client.post( - f"/api/projects/{self.team.id}/insights/funnel/", {"actions": [{"id": 666, "type": "actions", "order": 0}]} + f"/api/projects/{self.team.id}/insights/funnel/", + {"actions": [{"id": 666, "type": "actions", "order": 0}]}, ) self.assertEqual(response.status_code, 400) - self.assertEqual(response.json(), 
self.validation_error_response("Action ID 666 does not exist!")) + self.assertEqual( + response.json(), + self.validation_error_response("Action ID 666 does not exist!"), + ) def test_funnel_basic_exclusions(self): journeys_for( { - "1": [{"event": "step one"}, {"event": "step x"}, {"event": "step two"}], + "1": [ + {"event": "step one"}, + {"event": "step x"}, + {"event": "step two"}, + ], "2": [{"event": "step one"}, {"event": "step two"}], }, self.team, @@ -599,7 +756,14 @@ def test_funnel_basic_exclusions(self): {"id": "step one", "type": "events", "order": 0}, {"id": "step two", "type": "events", "order": 1}, ], - "exclusions": [{"id": "step x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "step x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } + ], "funnel_window_days": 14, "insight": "funnels", }, @@ -622,7 +786,11 @@ def test_funnel_basic_exclusions(self): def test_funnel_invalid_exclusions(self): journeys_for( { - "1": [{"event": "step one"}, {"event": "step x"}, {"event": "step two"}], + "1": [ + {"event": "step one"}, + {"event": "step x"}, + {"event": "step two"}, + ], "2": [{"event": "step one"}, {"event": "step two"}], }, self.team, @@ -660,7 +828,8 @@ def test_funnel_invalid_exclusions(self): if error: self.assertEqual(response.status_code, 400) self.assertEqual( - response.json(), self.validation_error_response("Exclusion event can't be the same as funnel step") + response.json(), + self.validation_error_response("Exclusion event can't be the same as funnel step"), ) else: self.assertEqual(response.status_code, 200) @@ -669,12 +838,24 @@ def test_single_property_breakdown(self): journeys_for( { "person1": [ - {"event": "$pageview", "properties": {"$browser": "Chrome", "$browser_version": 95}}, - {"event": "$pageleave", "properties": {"$browser": "Chrome", "$browser_version": 95}}, + { + "event": "$pageview", + "properties": {"$browser": "Chrome", "$browser_version": 95}, + }, 
+ { + "event": "$pageleave", + "properties": {"$browser": "Chrome", "$browser_version": 95}, + }, ], "person2": [ - {"event": "$pageview", "properties": {"$browser": "Safari", "$browser_version": 11}}, - {"event": "$pageview", "properties": {"$browser": "Safari", "$browser_version": 11}}, + { + "event": "$pageview", + "properties": {"$browser": "Safari", "$browser_version": 11}, + }, + { + "event": "$pageview", + "properties": {"$browser": "Safari", "$browser_version": 11}, + }, ], }, self.team, @@ -686,7 +867,12 @@ def test_single_property_breakdown(self): "actions": [], "events": [ {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, - {"id": "$pageleave", "name": "$pageleave", "type": "events", "order": 1}, + { + "id": "$pageleave", + "name": "$pageleave", + "type": "events", + "order": 1, + }, ], "display": "FunnelViz", "interval": "day", @@ -699,7 +885,10 @@ def test_single_property_breakdown(self): "funnel_to_step": 1, } - response = self.client.post(f"/api/projects/{self.team.id}/insights/funnel?refresh=true", filter_with_breakdown) + response = self.client.post( + f"/api/projects/{self.team.id}/insights/funnel?refresh=true", + filter_with_breakdown, + ) self.assertEqual(200, response.status_code) response_data = response.json() @@ -729,12 +918,24 @@ def test_multi_property_breakdown(self): journeys_for( { "person1": [ - {"event": "$pageview", "properties": {"$browser": "Chrome", "$browser_version": 95}}, - {"event": "$pageleave", "properties": {"$browser": "Chrome", "$browser_version": 95}}, + { + "event": "$pageview", + "properties": {"$browser": "Chrome", "$browser_version": 95}, + }, + { + "event": "$pageleave", + "properties": {"$browser": "Chrome", "$browser_version": 95}, + }, ], "person2": [ - {"event": "$pageview", "properties": {"$browser": "Safari", "$browser_version": 11}}, - {"event": "$pageview", "properties": {"$browser": "Safari", "$browser_version": 11}}, + { + "event": "$pageview", + "properties": {"$browser": "Safari", 
"$browser_version": 11}, + }, + { + "event": "$pageview", + "properties": {"$browser": "Safari", "$browser_version": 11}, + }, ], }, self.team, @@ -746,7 +947,12 @@ def test_multi_property_breakdown(self): "actions": [], "events": [ {"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}, - {"id": "$pageleave", "name": "$pageleave", "type": "events", "order": 1}, + { + "id": "$pageleave", + "name": "$pageleave", + "type": "events", + "order": 1, + }, ], "display": "FunnelViz", "interval": "day", @@ -759,7 +965,10 @@ def test_multi_property_breakdown(self): "funnel_to_step": 1, } - response = self.client.post(f"/api/projects/{self.team.id}/insights/funnel?refresh=true", filter_with_breakdown) + response = self.client.post( + f"/api/projects/{self.team.id}/insights/funnel?refresh=true", + filter_with_breakdown, + ) self.assertEqual(200, response.status_code) response_data = response.json() @@ -824,7 +1033,11 @@ def get_converted_and_dropped_people(client: Client, step): dropped_people = dropped_people_response.json()["results"][0]["people"] dropped_distinct_ids = [distinct_id for people in dropped_people for distinct_id in people["distinct_ids"]] - return {"name": step["name"], "converted": sorted(converted_distinct_ids), "dropped": sorted(dropped_distinct_ids)} + return { + "name": step["name"], + "converted": sorted(converted_distinct_ids), + "dropped": sorted(dropped_distinct_ids), + } def get_funnel_people_breakdown_by_step(client: Client, funnel_response): diff --git a/posthog/api/test/test_insight_query.py b/posthog/api/test/test_insight_query.py index 1b6b8c0453881..a2f2b8e9ab9da 100644 --- a/posthog/api/test/test_insight_query.py +++ b/posthog/api/test/test_insight_query.py @@ -29,7 +29,14 @@ def test_can_save_valid_events_query_to_an_insight(self) -> None: "properties.$lib", "timestamp", ], - "properties": [{"type": "event", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "event", + "key": 
"$browser", + "operator": "exact", + "value": "Chrome", + } + ], "limit": 100, }, }, @@ -52,7 +59,14 @@ def test_can_save_valid_events_table_query_to_an_insight(self) -> None: "properties.$lib", "timestamp", ], - "properties": [{"type": "event", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "event", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], "limit": 100, }, }, @@ -69,7 +83,14 @@ def test_can_save_valid_persons_table_query_to_an_insight(self) -> None: "columns": ["person", "id", "created_at", "person.$delete"], "source": { "kind": "PersonsNode", - "properties": [{"type": "person", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "person", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], }, }, }, @@ -85,7 +106,14 @@ def test_no_default_filters_on_insight_query(self) -> None: "columns": ["person", "id", "created_at", "person.$delete"], "source": { "kind": "PersonsNode", - "properties": [{"type": "person", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "person", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], }, }, }, @@ -151,14 +179,22 @@ def test_can_save_insights_query_to_an_insight(self) -> None: "custom_name": "Views", "event": "$pageview", "properties": [ - {"type": "event", "key": "$browser", "operator": "exact", "value": "Chrome"}, + { + "type": "event", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + }, {"type": "cohort", "key": "id", "value": 2}, ], "limit": 100, } ], "trendsFilter": {"display": "ActionsAreaGraph"}, - "breakdown": {"breakdown": "$geoip_country_code", "breakdown_type": "event"}, + "breakdown": { + "breakdown": "$geoip_country_code", + "breakdown_type": "event", + }, }, }, ) @@ -177,7 +213,14 @@ def test_cannot_save_invalid_persons_table_query_to_an_insight(self) -> None: "kind": "DataTableNode", "source": { 
"kind": "PersonsNode", - "properties": [{"type": "person", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "person", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], }, }, }, @@ -194,7 +237,14 @@ def test_listing_insights_by_default_does_not_include_those_with_only_queries(se "columns": ["person", "id", "created_at", "person.$delete"], "source": { "kind": "PersonsNode", - "properties": [{"type": "person", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "person", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], }, }, }, @@ -217,7 +267,14 @@ def test_can_list_insights_including_those_with_only_queries(self) -> None: "columns": ["person", "id", "created_at", "person.$delete"], "source": { "kind": "PersonsNode", - "properties": [{"type": "person", "key": "$browser", "operator": "exact", "value": "Chrome"}], + "properties": [ + { + "type": "person", + "key": "$browser", + "operator": "exact", + "value": "Chrome", + } + ], }, }, }, diff --git a/posthog/api/test/test_instance_settings.py b/posthog/api/test/test_instance_settings.py index 95d51c11baabd..cbc0d31abe088 100644 --- a/posthog/api/test/test_instance_settings.py +++ b/posthog/api/test/test_instance_settings.py @@ -1,8 +1,14 @@ from django.core import mail from rest_framework import status -from posthog.api.instance_settings import get_instance_setting as get_instance_setting_helper -from posthog.models.instance_setting import get_instance_setting, override_instance_config, set_instance_setting +from posthog.api.instance_settings import ( + get_instance_setting as get_instance_setting_helper, +) +from posthog.models.instance_setting import ( + get_instance_setting, + override_instance_config, + set_instance_setting, +) from posthog.settings import CONSTANCE_CONFIG from posthog.test.base import APIBaseTest @@ -14,7 +20,6 @@ def setUp(self): self.user.save() def 
test_list_instance_settings(self): - response = self.client.get(f"/api/instance_settings/") self.assertEqual(response.status_code, status.HTTP_200_OK) json_response = response.json() @@ -41,7 +46,6 @@ def test_list_instance_settings(self): self.assertEqual(item["value"], "") def test_can_retrieve_setting(self): - response = self.client.get(f"/api/instance_settings/AUTO_START_ASYNC_MIGRATIONS") self.assertEqual(response.status_code, status.HTTP_200_OK) json_response = response.json() @@ -56,7 +60,6 @@ def test_can_retrieve_setting(self): self.assertEqual(json_response["editable"], True) def test_retrieve_secret_setting(self): - response = self.client.get(f"/api/instance_settings/EMAIL_HOST_PASSWORD") self.assertEqual(response.status_code, status.HTTP_200_OK) json_response = response.json() @@ -83,13 +86,15 @@ def test_non_staff_user_cant_list_or_retrieve(self): response = self.client.get(f"/api/instance_settings/") self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("You are not a staff user, contact your instance admin.") + response.json(), + self.permission_denied_response("You are not a staff user, contact your instance admin."), ) response = self.client.get(f"/api/instance_settings/AUTO_START_ASYNC_MIGRATIONS") self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("You are not a staff user, contact your instance admin.") + response.json(), + self.permission_denied_response("You are not a staff user, contact your instance admin."), ) def test_update_setting(self): @@ -108,7 +113,8 @@ def test_updating_email_settings(self): set_instance_setting("EMAIL_HOST", "localhost") with self.settings(SITE_URL="http://localhost:8000", CELERY_TASK_ALWAYS_EAGER=True): response = self.client.patch( - f"/api/instance_settings/EMAIL_DEFAULT_FROM", {"value": "hellohello@posthog.com"} + 
f"/api/instance_settings/EMAIL_DEFAULT_FROM", + {"value": "hellohello@posthog.com"}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["value"], "hellohello@posthog.com") @@ -116,11 +122,16 @@ def test_updating_email_settings(self): self.assertEqual(mail.outbox[0].from_email, "hellohello@posthog.com") self.assertEqual(mail.outbox[0].subject, "This is a test email of your PostHog instance") html_message = mail.outbox[0].alternatives[0][0] # type: ignore - self.validate_basic_html(html_message, "http://localhost:8000", preheader="Email successfully set up!") + self.validate_basic_html( + html_message, + "http://localhost:8000", + preheader="Email successfully set up!", + ) def test_update_integer_setting(self): response = self.client.patch( - f"/api/instance_settings/ASYNC_MIGRATIONS_ROLLBACK_TIMEOUT", {"value": 48343943943} + f"/api/instance_settings/ASYNC_MIGRATIONS_ROLLBACK_TIMEOUT", + {"value": 48343943943}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(response.json()["value"], 48343943943) @@ -147,7 +158,8 @@ def test_non_staff_user_cant_update(self): response = self.client.get(f"/api/instance_settings/AUTO_START_ASYNC_MIGRATIONS", {"value": True}) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("You are not a staff user, contact your instance admin.") + response.json(), + self.permission_denied_response("You are not a staff user, contact your instance admin."), ) self.assertEqual(get_instance_setting_helper("AUTO_START_ASYNC_MIGRATIONS").value, False) diff --git a/posthog/api/test/test_instance_status.py b/posthog/api/test/test_instance_status.py index a499451f01c89..9799c3f19bc17 100644 --- a/posthog/api/test/test_instance_status.py +++ b/posthog/api/test/test_instance_status.py @@ -10,8 +10,14 @@ class TestInstanceStatus(APIBaseTest): @pytest.mark.skip_on_multitenancy def 
test_instance_status_routes(self): self.assertEqual(self.client.get("/api/instance_status").status_code, status.HTTP_200_OK) - self.assertEqual(self.client.get("/api/instance_status/navigation").status_code, status.HTTP_200_OK) - self.assertEqual(self.client.get("/api/instance_status/queries").status_code, status.HTTP_200_OK) + self.assertEqual( + self.client.get("/api/instance_status/navigation").status_code, + status.HTTP_200_OK, + ) + self.assertEqual( + self.client.get("/api/instance_status/queries").status_code, + status.HTTP_200_OK, + ) def test_object_storage_when_disabled(self): with self.settings(OBJECT_STORAGE_ENABLED=False): @@ -20,7 +26,14 @@ def test_object_storage_when_disabled(self): object_storage_metrics = [o for o in json["results"]["overview"] if o.get("key", None) == "object_storage"] self.assertEqual( - object_storage_metrics, [{"key": "object_storage", "metric": "Object Storage enabled", "value": False}] + object_storage_metrics, + [ + { + "key": "object_storage", + "metric": "Object Storage enabled", + "value": False, + } + ], ) @patch("posthog.storage.object_storage._client") @@ -35,8 +48,16 @@ def test_object_storage_when_enabled_but_unhealthy(self, patched_s3_client): self.assertEqual( object_storage_metrics, [ - {"key": "object_storage", "metric": "Object Storage enabled", "value": True}, - {"key": "object_storage", "metric": "Object Storage healthy", "value": False}, + { + "key": "object_storage", + "metric": "Object Storage enabled", + "value": True, + }, + { + "key": "object_storage", + "metric": "Object Storage healthy", + "value": False, + }, ], ) @@ -52,8 +73,16 @@ def test_object_storage_when_enabled_and_healthy(self, patched_s3_client): self.assertEqual( object_storage_metrics, [ - {"key": "object_storage", "metric": "Object Storage enabled", "value": True}, - {"key": "object_storage", "metric": "Object Storage healthy", "value": True}, + { + "key": "object_storage", + "metric": "Object Storage enabled", + "value": True, + }, + { 
+ "key": "object_storage", + "metric": "Object Storage healthy", + "value": True, + }, ], ) diff --git a/posthog/api/test/test_kafka_inspector.py b/posthog/api/test/test_kafka_inspector.py index 66cc38efab689..6a42741a47ff1 100644 --- a/posthog/api/test/test_kafka_inspector.py +++ b/posthog/api/test/test_kafka_inspector.py @@ -23,29 +23,40 @@ def _to_json(self, data: Union[Dict, List]) -> str: ) def test_fetch_message(self, _): response = self.client.post( - "/api/kafka_inspector/fetch_message", data={"topic": "foo", "partition": 1, "offset": 0} + "/api/kafka_inspector/fetch_message", + data={"topic": "foo", "partition": 1, "offset": 0}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( response.json(), - {"key": "k", "offset": 0, "partition": 0, "timestamp": 1650375470233, "topic": "foo", "value": "v"}, + { + "key": "k", + "offset": 0, + "partition": 0, + "timestamp": 1650375470233, + "topic": "foo", + "value": "v", + }, ) def test_fetch_message_invalid_params(self): response = self.client.post( - "/api/kafka_inspector/fetch_message", data={"topic": "foo", "partition": "1", "offset": 0} + "/api/kafka_inspector/fetch_message", + data={"topic": "foo", "partition": "1", "offset": 0}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json(), {"error": "Invalid partition."}) response = self.client.post( - "/api/kafka_inspector/fetch_message", data={"topic": 42, "partition": 1, "offset": 0} + "/api/kafka_inspector/fetch_message", + data={"topic": 42, "partition": 1, "offset": 0}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(response.json(), {"error": "Invalid topic."}) response = self.client.post( - "/api/kafka_inspector/fetch_message", data={"topic": "foo", "partition": 1, "offset": "0"} + "/api/kafka_inspector/fetch_message", + data={"topic": "foo", "partition": 1, "offset": "0"}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) 
self.assertEqual(response.json(), {"error": "Invalid offset."}) diff --git a/posthog/api/test/test_organization.py b/posthog/api/test/test_organization.py index 25a4909a0f877..450264b1d768e 100644 --- a/posthog/api/test/test_organization.py +++ b/posthog/api/test/test_organization.py @@ -69,7 +69,8 @@ def test_update_organization_if_admin(self): response_rename = self.client.patch(f"/api/organizations/{self.organization.id}", {"name": "QWERTY"}) response_email = self.client.patch( - f"/api/organizations/{self.organization.id}", {"is_member_join_email_enabled": False} + f"/api/organizations/{self.organization.id}", + {"is_member_join_email_enabled": False}, ) self.assertEqual(response_rename.status_code, status.HTTP_200_OK) @@ -88,7 +89,8 @@ def test_update_organization_if_owner(self): response_rename = self.client.patch(f"/api/organizations/{self.organization.id}", {"name": "QWERTY"}) response_email = self.client.patch( - f"/api/organizations/{self.organization.id}", {"is_member_join_email_enabled": False} + f"/api/organizations/{self.organization.id}", + {"is_member_join_email_enabled": False}, ) self.assertEqual(response_rename.status_code, status.HTTP_200_OK) @@ -103,7 +105,8 @@ def test_cannot_update_organization_if_not_owner_or_admin(self): self.organization_membership.save() response_rename = self.client.patch(f"/api/organizations/{self.organization.id}", {"name": "ASDFG"}) response_email = self.client.patch( - f"/api/organizations/{self.organization.id}", {"is_member_join_email_enabled": False} + f"/api/organizations/{self.organization.id}", + {"is_member_join_email_enabled": False}, ) self.assertEqual(response_rename.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(response_email.status_code, status.HTTP_403_FORBIDDEN) diff --git a/posthog/api/test/test_organization_domain.py b/posthog/api/test/test_organization_domain.py index 2615880cbff20..f7f3e48b0b276 100644 --- a/posthog/api/test/test_organization_domain.py +++ 
b/posthog/api/test/test_organization_domain.py @@ -9,7 +9,12 @@ from freezegun import freeze_time from rest_framework import status -from posthog.models import Organization, OrganizationDomain, OrganizationMembership, Team +from posthog.models import ( + Organization, + OrganizationDomain, + OrganizationMembership, + Team, +) from posthog.test.base import APIBaseTest, BaseTest @@ -133,7 +138,10 @@ def test_creating_domain_on_self_hosted_is_automatically_verified(self): instance = OrganizationDomain.objects.get(id=response_data["id"]) self.assertEqual(instance.domain, "the.posthog.com") - self.assertEqual(instance.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC"))) + self.assertEqual( + instance.verified_at, + datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")), + ) self.assertEqual(instance.last_verification_retry, None) self.assertEqual(instance.sso_enforcement, "") @@ -161,7 +169,13 @@ def test_cannot_create_invalid_domain(self): count = OrganizationDomain.objects.count() self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() - invalid_domains = ["test@posthog.com", "🦔🦔🦔.com", "one.two.c", "--alpha.com", "javascript: alert(1)"] + invalid_domains = [ + "test@posthog.com", + "🦔🦔🦔.com", + "one.two.c", + "--alpha.com", + "javascript: alert(1)", + ] for _domain in invalid_domains: response = self.client.post("/api/organizations/@current/domains/", {"domain": _domain}) @@ -186,7 +200,11 @@ def test_can_request_verification_for_unverified_domains(self, mock_dns_query): mock_dns_query.return_value = FakeDNSResponse( [ dns.rrset.from_text( - "_posthog-challenge.myposthog.com.", 3600, "IN", "TXT", self.domain.verification_challenge + "_posthog-challenge.myposthog.com.", + 3600, + "IN", + "TXT", + self.domain.verification_challenge, ) ] ) @@ -197,10 +215,16 @@ def test_can_request_verification_for_unverified_domains(self, mock_dns_query): response_data = response.json() 
self.domain.refresh_from_db() self.assertEqual(response_data["domain"], "myposthog.com") - self.assertEqual(response_data["verified_at"], self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ")) + self.assertEqual( + response_data["verified_at"], + self.domain.verified_at.strftime("%Y-%m-%dT%H:%M:%SZ"), + ) self.assertEqual(response_data["is_verified"], True) - self.assertEqual(self.domain.verified_at, datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC"))) + self.assertEqual( + self.domain.verified_at, + datetime.datetime(2021, 8, 8, 20, 20, 8, tzinfo=ZoneInfo("UTC")), + ) self.assertEqual(self.domain.is_verified, True) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -220,7 +244,8 @@ def test_domain_is_not_verified_with_missing_challenge(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) + self.domain.last_verification_retry, + datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")), ) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -240,7 +265,8 @@ def test_domain_is_not_verified_with_missing_domain(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) + self.domain.last_verification_retry, + datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")), ) @patch("posthog.models.organization_domain.dns.resolver.resolve") @@ -249,7 +275,15 @@ def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query): self.organization_membership.save() mock_dns_query.return_value = FakeDNSResponse( - [dns.rrset.from_text("_posthog-challenge.myposthog.com.", 3600, "IN", "TXT", "incorrect_challenge")] + [ + 
dns.rrset.from_text( + "_posthog-challenge.myposthog.com.", + 3600, + "IN", + "TXT", + "incorrect_challenge", + ) + ] ) with freeze_time("2021-10-10T10:10:10Z"): @@ -262,7 +296,8 @@ def test_domain_is_not_verified_with_incorrect_challenge(self, mock_dns_query): self.assertEqual(response_data["verified_at"], None) self.assertEqual(self.domain.verified_at, None) self.assertEqual( - self.domain.last_verification_retry, datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")) + self.domain.last_verification_retry, + datetime.datetime(2021, 10, 10, 10, 10, 10, tzinfo=ZoneInfo("UTC")), ) def test_cannot_request_verification_for_verified_domains(self): @@ -288,7 +323,8 @@ def test_only_admin_can_create_verified_domains(self): response = self.client.post("/api/organizations/@current/domains/", {"domain": "evil.posthog.com"}) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("Your organization access level is insufficient.") + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), ) self.assertEqual(OrganizationDomain.objects.count(), count) @@ -297,7 +333,8 @@ def test_only_admin_can_request_verification(self): response = self.client.post(f"/api/organizations/@current/domains/{self.domain.id}/verify") self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("Your organization access level is insufficient.") + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), ) self.domain.refresh_from_db() @@ -329,7 +366,8 @@ def test_cannot_enforce_sso_or_enable_jit_provisioning_on_unverified_domain(self # SSO Enforcement response = self.client.patch( - f"/api/organizations/@current/domains/{self.domain.id}/", {"sso_enforcement": "google-oauth2"} + f"/api/organizations/@current/domains/{self.domain.id}/", + 
{"sso_enforcement": "google-oauth2"}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -346,7 +384,8 @@ def test_cannot_enforce_sso_or_enable_jit_provisioning_on_unverified_domain(self # JIT Provisioning response = self.client.patch( - f"/api/organizations/@current/domains/{self.domain.id}/", {"jit_provisioning_enabled": True} + f"/api/organizations/@current/domains/{self.domain.id}/", + {"jit_provisioning_enabled": True}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -383,7 +422,8 @@ def test_only_admin_can_update_domain(self): ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("Your organization access level is insufficient.") + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), ) self.domain.refresh_from_db() self.assertEqual(self.domain.jit_provisioning_enabled, False) @@ -421,7 +461,8 @@ def test_only_admin_can_delete_domain(self): response = self.client.delete(f"/api/organizations/@current/domains/{self.domain.id}") self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("Your organization access level is insufficient.") + response.json(), + self.permission_denied_response("Your organization access level is insufficient."), ) self.domain.refresh_from_db() diff --git a/posthog/api/test/test_organization_invites.py b/posthog/api/test/test_organization_invites.py index 4ab6f255487cd..0e52252781963 100644 --- a/posthog/api/test/test_organization_invites.py +++ b/posthog/api/test/test_organization_invites.py @@ -5,7 +5,11 @@ from rest_framework import status from posthog.models.instance_setting import set_instance_setting -from posthog.models.organization import Organization, OrganizationInvite, OrganizationMembership +from posthog.models.organization import ( + 
Organization, + OrganizationInvite, + OrganizationMembership, +) from posthog.test.base import APIBaseTest NAME_SEEDS = ["John", "Jane", "Alice", "Bob", ""] @@ -13,7 +17,6 @@ class TestOrganizationInvitesAPI(APIBaseTest): def helper_generate_bulk_invite_payload(self, count: int): - payload = [] for i in range(0, count): @@ -114,7 +117,11 @@ def test_add_organization_invite_with_email(self, mock_capture): self.user.distinct_id, "team invite executed", properties=capture_props, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) self.assertEqual(mock_capture.call_count, 2) @@ -188,7 +195,11 @@ def test_allow_bulk_creating_invites(self, mock_capture): "current_member_count": 1, "email_available": True, }, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) # Assert capture call for invitee @@ -209,7 +220,11 @@ def test_maximum_20_invites_per_request(self): count = OrganizationInvite.objects.count() payload = self.helper_generate_bulk_invite_payload(21) - with self.settings(EMAIL_ENABLED=True, EMAIL_HOST="localhost", SITE_URL="http://test.posthog.com"): + with self.settings( + EMAIL_ENABLED=True, + EMAIL_HOST="localhost", + SITE_URL="http://test.posthog.com", + ): response = self.client.post("/api/organizations/@current/invites/bulk/", payload, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -234,7 +249,11 @@ def test_invites_are_create_atomically(self): payload = self.helper_generate_bulk_invite_payload(5) payload[4]["target_email"] = None - with self.settings(EMAIL_ENABLED=True, EMAIL_HOST="localhost", SITE_URL="http://test.posthog.com"): + with self.settings( + EMAIL_ENABLED=True, 
+ EMAIL_HOST="localhost", + SITE_URL="http://test.posthog.com", + ): response = self.client.post("/api/organizations/@current/invites/bulk/", payload, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) @@ -251,8 +270,16 @@ def test_cannot_bulk_create_invites_for_another_organization(self): count = OrganizationInvite.objects.count() payload = self.helper_generate_bulk_invite_payload(3) - with self.settings(EMAIL_ENABLED=True, EMAIL_HOST="localhost", SITE_URL="http://test.posthog.com"): - response = self.client.post(f"/api/organizations/{another_org.id}/invites/bulk/", payload, format="json") + with self.settings( + EMAIL_ENABLED=True, + EMAIL_HOST="localhost", + SITE_URL="http://test.posthog.com", + ): + response = self.client.post( + f"/api/organizations/{another_org.id}/invites/bulk/", + payload, + format="json", + ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual(response.json(), self.permission_denied_response()) diff --git a/posthog/api/test/test_organization_members.py b/posthog/api/test/test_organization_members.py index b60b12de89268..2416e5552fa9a 100644 --- a/posthog/api/test/test_organization_members.py +++ b/posthog/api/test/test_organization_members.py @@ -77,7 +77,8 @@ def test_change_organization_member_level(self): membership = OrganizationMembership.objects.create(user=user, organization=self.organization) self.assertEqual(membership.level, OrganizationMembership.Level.MEMBER) response = self.client.patch( - f"/api/organizations/@current/members/{user.uuid}", {"level": OrganizationMembership.Level.ADMIN} + f"/api/organizations/@current/members/{user.uuid}", + {"level": OrganizationMembership.Level.ADMIN}, ) self.assertEqual(response.status_code, 200) updated_membership = OrganizationMembership.objects.get(user=user, organization=self.organization) @@ -110,7 +111,8 @@ def test_admin_can_promote_to_admin(self): membership = OrganizationMembership.objects.create(user=user, 
organization=self.organization) self.assertEqual(membership.level, OrganizationMembership.Level.MEMBER) response = self.client.patch( - f"/api/organizations/@current/members/{user.uuid}", {"level": OrganizationMembership.Level.ADMIN} + f"/api/organizations/@current/members/{user.uuid}", + {"level": OrganizationMembership.Level.ADMIN}, ) self.assertEqual(response.status_code, 200) updated_membership = OrganizationMembership.objects.get(user=user, organization=self.organization) @@ -121,7 +123,8 @@ def test_change_organization_member_level_requires_admin(self): membership = OrganizationMembership.objects.create(user=user, organization=self.organization) self.assertEqual(membership.level, OrganizationMembership.Level.MEMBER) response = self.client.patch( - f"/api/organizations/@current/members/{user.uuid}/", {"level": OrganizationMembership.Level.ADMIN} + f"/api/organizations/@current/members/{user.uuid}/", + {"level": OrganizationMembership.Level.ADMIN}, ) updated_membership = OrganizationMembership.objects.get(user=user, organization=self.organization) @@ -141,7 +144,8 @@ def test_cannot_change_own_organization_member_level(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() response = self.client.patch( - f"/api/organizations/@current/members/{self.user.uuid}", {"level": OrganizationMembership.Level.MEMBER} + f"/api/organizations/@current/members/{self.user.uuid}", + {"level": OrganizationMembership.Level.MEMBER}, ) self.organization_membership.refresh_from_db() self.assertEqual(self.organization_membership.level, OrganizationMembership.Level.ADMIN) @@ -164,7 +168,8 @@ def test_pass_ownership(self): self.organization_membership.level = OrganizationMembership.Level.OWNER self.organization_membership.save() response = self.client.patch( - f"/api/organizations/@current/members/{user.uuid}/", {"level": OrganizationMembership.Level.OWNER} + f"/api/organizations/@current/members/{user.uuid}/", + {"level": 
OrganizationMembership.Level.OWNER}, ) self.organization_membership.refresh_from_db() membership.refresh_from_db() @@ -186,7 +191,8 @@ def test_pass_ownership_only_if_owner(self): self.organization_membership.level = OrganizationMembership.Level.ADMIN self.organization_membership.save() response = self.client.patch( - f"/api/organizations/@current/members/{user.uuid}/", {"level": OrganizationMembership.Level.OWNER} + f"/api/organizations/@current/members/{user.uuid}/", + {"level": OrganizationMembership.Level.OWNER}, ) self.organization_membership.refresh_from_db() membership.refresh_from_db() diff --git a/posthog/api/test/test_person.py b/posthog/api/test/test_person.py index b1d8d443d4d1d..549e13177f26c 100644 --- a/posthog/api/test/test_person.py +++ b/posthog/api/test/test_person.py @@ -29,7 +29,10 @@ class TestPerson(ClickhouseTestMixin, APIBaseTest): def test_legacy_get_person_by_id(self) -> None: person = _create_person( - team=self.team, distinct_ids=["distinct_id"], properties={"email": "someone@gmail.com"}, immediate=True + team=self.team, + distinct_ids=["distinct_id"], + properties={"email": "someone@gmail.com"}, + immediate=True, ) flush_persons_and_events() @@ -42,9 +45,15 @@ def test_legacy_get_person_by_id(self) -> None: @also_test_with_materialized_columns(event_properties=["email"], person_properties=["email"]) @snapshot_clickhouse_queries def test_search(self) -> None: - _create_person(team=self.team, distinct_ids=["distinct_id"], properties={"email": "someone@gmail.com"}) _create_person( - team=self.team, distinct_ids=["distinct_id_2"], properties={"email": "another@gmail.com", "name": "james"} + team=self.team, + distinct_ids=["distinct_id"], + properties={"email": "someone@gmail.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["distinct_id_2"], + properties={"email": "another@gmail.com", "name": "james"}, ) _create_person(team=self.team, distinct_ids=["distinct_id_3"], properties={"name": "jane"}) @@ -60,7 +69,11 @@ def 
test_search(self) -> None: @also_test_with_materialized_columns(event_properties=["email"], person_properties=["email"]) @snapshot_clickhouse_queries def test_search_person_id(self) -> None: - person = _create_person(team=self.team, distinct_ids=["distinct_id"], properties={"email": "someone@gmail.com"}) + person = _create_person( + team=self.team, + distinct_ids=["distinct_id"], + properties={"email": "someone@gmail.com"}, + ) flush_persons_and_events() response = self.client.get(f"/api/person/?search={person.uuid}") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -69,21 +82,47 @@ def test_search_person_id(self) -> None: @also_test_with_materialized_columns(event_properties=["email"], person_properties=["email"]) @snapshot_clickhouse_queries def test_properties(self) -> None: - _create_person(team=self.team, distinct_ids=["distinct_id"], properties={"email": "someone@gmail.com"}) - _create_person(team=self.team, distinct_ids=["distinct_id_2"], properties={"email": "another@gmail.com"}) + _create_person( + team=self.team, + distinct_ids=["distinct_id"], + properties={"email": "someone@gmail.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["distinct_id_2"], + properties={"email": "another@gmail.com"}, + ) _create_person(team=self.team, distinct_ids=["distinct_id_3"], properties={}) flush_persons_and_events() response = self.client.get( "/api/person/?properties=%s" - % json.dumps([{"key": "email", "operator": "is_set", "value": "is_set", "type": "person"}]) + % json.dumps( + [ + { + "key": "email", + "operator": "is_set", + "value": "is_set", + "type": "person", + } + ] + ) ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.json()["results"]), 2) response = self.client.get( "/api/person/?properties=%s" - % json.dumps([{"key": "email", "operator": "icontains", "value": "another@gm", "type": "person"}]) + % json.dumps( + [ + { + "key": "email", + "operator": "icontains", + "value": "another@gm", + 
"type": "person", + } + ] + ) ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual(len(response.json()["results"]), 1) @@ -96,9 +135,21 @@ def test_person_property_values(self): team=self.team, properties={"random_prop": "asdf", "some other prop": "with some text"}, ) - _create_person(distinct_ids=["person_2"], team=self.team, properties={"random_prop": "asdf"}) - _create_person(distinct_ids=["person_3"], team=self.team, properties={"random_prop": "qwerty"}) - _create_person(distinct_ids=["person_4"], team=self.team, properties={"something_else": "qwerty"}) + _create_person( + distinct_ids=["person_2"], + team=self.team, + properties={"random_prop": "asdf"}, + ) + _create_person( + distinct_ids=["person_3"], + team=self.team, + properties={"random_prop": "qwerty"}, + ) + _create_person( + distinct_ids=["person_4"], + team=self.team, + properties={"something_else": "qwerty"}, + ) flush_persons_and_events() response = self.client.get("/api/person/values/?key=random_prop") @@ -118,7 +169,6 @@ def test_person_property_values(self): @also_test_with_materialized_columns(event_properties=["email"], person_properties=["email"]) @snapshot_clickhouse_queries def test_filter_person_email(self): - _create_person( team=self.team, distinct_ids=["distinct_id", "another_one"], @@ -127,7 +177,10 @@ def test_filter_person_email(self): immediate=True, ) person2: Person = _create_person( - team=self.team, distinct_ids=["distinct_id_2"], properties={"email": "another@gmail.com"}, immediate=True + team=self.team, + distinct_ids=["distinct_id_2"], + properties={"email": "another@gmail.com"}, + immediate=True, ) flush_persons_and_events() @@ -142,7 +195,6 @@ def test_filter_person_email(self): @snapshot_clickhouse_queries def test_filter_person_prop(self): - _create_person( team=self.team, distinct_ids=["distinct_id", "another_one"], @@ -178,7 +230,10 @@ def test_filter_person_list(self): immediate=True, ) person2: Person = _create_person( - team=self.team, 
distinct_ids=["distinct_id_2"], properties={"email": "another@gmail.com"}, immediate=True + team=self.team, + distinct_ids=["distinct_id_2"], + properties={"email": "another@gmail.com"}, + immediate=True, ) flush_persons_and_events() @@ -210,13 +265,14 @@ def test_filter_person_list(self): self.assertEqual(len(response.json()["results"]), 0) def test_cant_see_another_organization_pii_with_filters(self): - # Completely different organization another_org: Organization = Organization.objects.create() another_team: Team = Team.objects.create(organization=another_org) _create_person(team=another_team, distinct_ids=["distinct_id", "x_another_one"]) _create_person( - team=another_team, distinct_ids=["x_distinct_id_2"], properties={"email": "team2_another@gmail.com"} + team=another_team, + distinct_ids=["x_distinct_id_2"], + properties={"email": "team2_another@gmail.com"}, ) # Person in current team @@ -237,7 +293,10 @@ def test_cant_see_another_organization_pii_with_filters(self): @freeze_time("2021-08-25T22:09:14.252Z") def test_delete_person(self): person = _create_person( - team=self.team, distinct_ids=["person_1", "anonymous_id"], properties={"$os": "Chrome"}, immediate=True + team=self.team, + distinct_ids=["person_1", "anonymous_id"], + properties={"$os": "Chrome"}, + immediate=True, ) _create_event(event="test", team=self.team, distinct_id="person_1") _create_event(event="test", team=self.team, distinct_id="anonymous_id") @@ -280,7 +339,7 @@ def test_delete_person(self): self.assertEqual([(100, 1, "{}")], ch_persons) # No async deletion is scheduled self.assertEqual(AsyncDeletion.objects.filter(team_id=self.team.id).count(), 0) - ch_events = sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk})[ + ch_events = sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[ 0 ][0] self.assertEqual(ch_events, 3) @@ -288,7 +347,10 @@ def test_delete_person(self): 
@freeze_time("2021-08-25T22:09:14.252Z") def test_delete_person_and_events(self): person = _create_person( - team=self.team, distinct_ids=["person_1", "anonymous_id"], properties={"$os": "Chrome"}, immediate=True + team=self.team, + distinct_ids=["person_1", "anonymous_id"], + properties={"$os": "Chrome"}, + immediate=True, ) _create_event(event="test", team=self.team, distinct_id="person_1") _create_event(event="test", team=self.team, distinct_id="anonymous_id") @@ -315,7 +377,9 @@ def test_delete_person_and_events(self): def test_split_people_keep_props(self) -> None: # created first person1 = _create_person( - team=self.team, distinct_ids=["1", "2", "3"], properties={"$browser": "whatever", "$os": "Mac OS X"} + team=self.team, + distinct_ids=["1", "2", "3"], + properties={"$browser": "whatever", "$os": "Mac OS X"}, ) self.client.post("/api/person/%s/split/" % person1.pk, {"main_distinct_id": "1"}) @@ -411,7 +475,8 @@ def test_update_multiple_person_properties_validation(self) -> None: self.assertEqual(response.status_code, 400) self.assertEqual( - response.json(), self.validation_error_response("required", "This field is required.", "properties") + response.json(), + self.validation_error_response("required", "This field is required.", "properties"), ) @mock.patch("posthog.api.person.capture_internal") @@ -469,10 +534,17 @@ def test_delete_person_properties(self, mock_capture) -> None: def test_return_non_anonymous_name(self) -> None: _create_person( team=self.team, - distinct_ids=["distinct_id1", "17787c3099427b-0e8f6c86323ea9-33647309-1aeaa0-17787c30995b7c"], + distinct_ids=[ + "distinct_id1", + "17787c3099427b-0e8f6c86323ea9-33647309-1aeaa0-17787c30995b7c", + ], ) _create_person( - team=self.team, distinct_ids=["17787c327b-0e8f623ea9-336473-1aeaa0-17787c30995b7c", "distinct_id2"] + team=self.team, + distinct_ids=[ + "17787c327b-0e8f623ea9-336473-1aeaa0-17787c30995b7c", + "distinct_id2", + ], ) flush_persons_and_events() @@ -487,7 +559,10 @@ def 
test_return_non_anonymous_name(self) -> None: ) self.assertCountEqual( response["results"][1]["distinct_ids"], - ["distinct_id1", "17787c3099427b-0e8f6c86323ea9-33647309-1aeaa0-17787c30995b7c"], + [ + "distinct_id1", + "17787c3099427b-0e8f6c86323ea9-33647309-1aeaa0-17787c30995b7c", + ], ) def test_person_display_name(self) -> None: @@ -496,12 +571,19 @@ def test_person_display_name(self) -> None: _create_person( team=self.team, distinct_ids=["distinct_id1"], - properties={"custom_name": "someone", "custom_email": "someone@custom.com", "email": "someone@gmail.com"}, + properties={ + "custom_name": "someone", + "custom_email": "someone@custom.com", + "email": "someone@gmail.com", + }, ) _create_person( team=self.team, distinct_ids=["distinct_id2"], - properties={"custom_email": "another_one@custom.com", "email": "another_one@gmail.com"}, + properties={ + "custom_email": "another_one@custom.com", + "email": "another_one@gmail.com", + }, ) _create_person( team=self.team, @@ -542,9 +624,16 @@ def test_person_display_name_defaults(self) -> None: self.assertEqual(results[2]["name"], "distinct_id3") def test_person_cohorts(self) -> None: - _create_person(team=self.team, distinct_ids=["1"], properties={"$some_prop": "something", "number": 1}) + _create_person( + team=self.team, + distinct_ids=["1"], + properties={"$some_prop": "something", "number": 1}, + ) person2 = _create_person( - team=self.team, distinct_ids=["2"], properties={"$some_prop": "something", "number": 2}, immediate=True + team=self.team, + distinct_ids=["2"], + properties={"$some_prop": "something", "number": 2}, + immediate=True, ) cohort1 = Cohort.objects.create( team=self.team, @@ -552,17 +641,25 @@ def test_person_cohorts(self) -> None: name="cohort1", ) cohort2 = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "number", "value": 1, "type": "person"}]}], name="cohort2" + team=self.team, + groups=[{"properties": [{"key": "number", "value": 1, "type": "person"}]}], + 
name="cohort2", ) cohort3 = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "number", "value": 2, "type": "person"}]}], name="cohort3" + team=self.team, + groups=[{"properties": [{"key": "number", "value": 2, "type": "person"}]}], + name="cohort3", ) cohort1.calculate_people_ch(pending_version=0) cohort2.calculate_people_ch(pending_version=0) cohort3.calculate_people_ch(pending_version=0) cohort4 = Cohort.objects.create( - team=self.team, groups=[], is_static=True, last_calculation=timezone.now(), name="cohort4" + team=self.team, + groups=[], + is_static=True, + last_calculation=timezone.now(), + name="cohort4", ) cohort4.insert_users_by_list(["2"]) @@ -586,7 +683,8 @@ def test_split_person_clickhouse(self): people = Person.objects.all().order_by("id") clickhouse_people = sync_execute( - "SELECT id FROM person FINAL WHERE team_id = %(team_id)s", {"team_id": self.team.pk} + "SELECT id FROM person FINAL WHERE team_id = %(team_id)s", + {"team_id": self.team.pk}, ) self.assertCountEqual(clickhouse_people, [(person.uuid,) for person in people]) @@ -622,7 +720,10 @@ def test_patch_user_property_activity(self): person_id=person.uuid, expected=[ { - "user": {"first_name": self.user.first_name, "email": self.user.email}, + "user": { + "first_name": self.user.first_name, + "email": self.user.email, + }, "activity": "updated", "created_at": "2021-08-25T22:09:14.252000Z", "scope": "Person", @@ -648,9 +749,15 @@ def test_patch_user_property_activity(self): def test_csv_export(self): _create_person( - team=self.team, distinct_ids=["1", "2", "3"], properties={"$browser": "whatever", "$os": "Mac OS X"} + team=self.team, + distinct_ids=["1", "2", "3"], + properties={"$browser": "whatever", "$os": "Mac OS X"}, + ) + _create_person( + team=self.team, + distinct_ids=["4"], + properties={"$browser": "whatever", "$os": "Windows"}, ) - _create_person(team=self.team, distinct_ids=["4"], properties={"$browser": "whatever", "$os": "Windows"}) 
flush_persons_and_events() response = self.client.get("/api/person.csv") @@ -668,7 +775,9 @@ def test_pagination_limit(self): for index in range(0, 19): created_ids.append(str(index + 100)) Person.objects.create( # creating without _create_person to guarentee created_at ordering - team=self.team, distinct_ids=[str(index + 100)], properties={"$browser": "whatever", "$os": "Windows"} + team=self.team, + distinct_ids=[str(index + 100)], + properties={"$browser": "whatever", "$os": "Windows"}, ) # Very occasionally, a person might be deleted in postgres but not in Clickhouse due to network issues or whatever @@ -733,16 +842,21 @@ def test_rate_limits_for_persons_are_independent(self, rate_limit_enabled_mock, for _ in range(5): response = self.client.get( - f"/api/projects/{self.team.pk}/feature_flags", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/projects/{self.team.pk}/feature_flags", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) # Call to flags gets rate limited response = self.client.get( - f"/api/projects/{self.team.pk}/feature_flags", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/projects/{self.team.pk}/feature_flags", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) - self.assertEqual(len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), 1) + self.assertEqual( + len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), + 1, + ) incr_mock.assert_any_call( "rate_limit_exceeded", tags={ @@ -758,7 +872,8 @@ def test_rate_limits_for_persons_are_independent(self, rate_limit_enabled_mock, # but not call to persons for _ in range(3): response = self.client.get( - f"/api/projects/{self.team.pk}/persons/", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/projects/{self.team.pk}/persons/", + HTTP_AUTHORIZATION=f"Bearer 
{personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) response = self.client.get( @@ -767,16 +882,23 @@ def test_rate_limits_for_persons_are_independent(self, rate_limit_enabled_mock, ) self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), 0) + self.assertEqual( + len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), + 0, + ) incr_mock.reset_mock() # until the limit is reached response = self.client.get( - f"/api/projects/{self.team.pk}/persons/", HTTP_AUTHORIZATION=f"Bearer {personal_api_key}" + f"/api/projects/{self.team.pk}/persons/", + HTTP_AUTHORIZATION=f"Bearer {personal_api_key}", ) self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) - self.assertEqual(len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), 1) + self.assertEqual( + len([1 for name, args, kwargs in incr_mock.mock_calls if args[0] == "rate_limit_exceeded"]), + 1, + ) incr_mock.assert_any_call( "rate_limit_exceeded", tags={ @@ -790,12 +912,19 @@ def test_rate_limits_for_persons_are_independent(self, rate_limit_enabled_mock, @freeze_time("2021-08-25T22:09:14.252Z") def test_person_cache_invalidation(self): _create_person( - team=self.team, distinct_ids=["person_1", "anonymous_id"], properties={"$os": "Chrome"}, immediate=True + team=self.team, + distinct_ids=["person_1", "anonymous_id"], + properties={"$os": "Chrome"}, + immediate=True, ) _create_event(event="test", team=self.team, distinct_id="person_1") _create_event(event="test", team=self.team, distinct_id="anonymous_id") _create_event(event="test", team=self.team, distinct_id="someone_else") - data = {"events": json.dumps([{"id": "test", "type": "events"}]), "entity_type": "events", "entity_id": "test"} + data = { + "events": json.dumps([{"id": "test", "type": "events"}]), + "entity_type": "events", + 
"entity_id": "test", + } trend_response = self.client.get( f"/api/projects/{self.team.id}/insights/trend/", @@ -807,7 +936,12 @@ def test_person_cache_invalidation(self): self.assertEqual(response["is_cached"], False) # Create another person - _create_person(team=self.team, distinct_ids=["person_2"], properties={"$os": "Chrome"}, immediate=True) + _create_person( + team=self.team, + distinct_ids=["person_2"], + properties={"$os": "Chrome"}, + immediate=True, + ) _create_event(event="test", team=self.team, distinct_id="person_2") # Check cached response hasn't changed @@ -832,7 +966,12 @@ def test_person_cache_invalidation(self): self.assertEqual(response["results"][0]["count"], 2) self.assertEqual(response["is_cached"], False) - def _get_person_activity(self, person_id: Optional[str] = None, *, expected_status: int = status.HTTP_200_OK): + def _get_person_activity( + self, + person_id: Optional[str] = None, + *, + expected_status: int = status.HTTP_200_OK, + ): if person_id: url = f"/api/person/{person_id}/activity" else: @@ -860,7 +999,9 @@ def test_pagination_limit(self): for index in range(0, 19): created_ids.append(str(index + 100)) Person.objects.create( # creating without _create_person to guarentee created_at ordering - team=self.team, distinct_ids=[str(index + 100)], properties={"$browser": "whatever", "$os": "Windows"} + team=self.team, + distinct_ids=[str(index + 100)], + properties={"$browser": "whatever", "$os": "Windows"}, ) returned_ids = [] response = self.client.get("/api/person/?limit=10").json() diff --git a/posthog/api/test/test_personal_api_keys.py b/posthog/api/test/test_personal_api_keys.py index 3f235da25e428..da30644e6d32e 100644 --- a/posthog/api/test/test_personal_api_keys.py +++ b/posthog/api/test/test_personal_api_keys.py @@ -22,7 +22,12 @@ def test_create_personal_api_key(self): self.assertIsNone(key.last_used_at) self.assertDictEqual( response_data, - {"id": key.id, "label": label, "last_used_at": None, "user_id": self.user.id}, + { + 
"id": key.id, + "label": label, + "last_used_at": None, + "user_id": self.user.id, + }, ) self.assertTrue(value.startswith("phx_")) # Personal API key prefix @@ -32,12 +37,19 @@ def test_create_personal_api_key_label_required(self): response_data = response.json() self.assertDictEqual( response_data, - {"type": "validation_error", "code": "blank", "detail": "This field may not be blank.", "attr": "label"}, + { + "type": "validation_error", + "code": "blank", + "detail": "This field may not be blank.", + "attr": "label", + }, ) def test_delete_personal_api_key(self): key = PersonalAPIKey.objects.create( - label="Test", user=self.user, secure_value=hash_key_value(generate_random_token_personal()) + label="Test", + user=self.user, + secure_value=hash_key_value(generate_random_token_personal()), ) self.assertEqual(PersonalAPIKey.objects.count(), 1) response = self.client.delete(f"/api/personal_api_keys/{key.id}/") @@ -47,11 +59,15 @@ def test_delete_personal_api_key(self): def test_list_only_user_personal_api_keys(self): my_label = "Test" my_key = PersonalAPIKey.objects.create( - label=my_label, user=self.user, secure_value=hash_key_value(generate_random_token_personal()) + label=my_label, + user=self.user, + secure_value=hash_key_value(generate_random_token_personal()), ) other_user = self._create_user("abc@def.xyz") PersonalAPIKey.objects.create( - label="Other test", user=other_user, secure_value=hash_key_value(generate_random_token_personal()) + label="Other test", + user=other_user, + secure_value=hash_key_value(generate_random_token_personal()), ) self.assertEqual(PersonalAPIKey.objects.count(), 2) response = self.client.get("/api/personal_api_keys") @@ -60,26 +76,42 @@ def test_list_only_user_personal_api_keys(self): self.assertEqual(len(response_data), 1) response_data[0].pop("created_at") self.assertDictEqual( - response_data[0], {"id": my_key.id, "label": my_label, "last_used_at": None, "user_id": self.user.id} + response_data[0], + { + "id": my_key.id, + 
"label": my_label, + "last_used_at": None, + "user_id": self.user.id, + }, ) def test_get_own_personal_api_key(self): my_label = "Test" my_key = PersonalAPIKey.objects.create( - label=my_label, user=self.user, secure_value=hash_key_value(generate_random_token_personal()) + label=my_label, + user=self.user, + secure_value=hash_key_value(generate_random_token_personal()), ) response = self.client.get(f"/api/personal_api_keys/{my_key.id}/") self.assertEqual(response.status_code, 200) response_data = response.json() response_data.pop("created_at") self.assertDictEqual( - response_data, {"id": my_key.id, "label": my_label, "last_used_at": None, "user_id": self.user.id} + response_data, + { + "id": my_key.id, + "label": my_label, + "last_used_at": None, + "user_id": self.user.id, + }, ) def test_get_someone_elses_personal_api_key(self): other_user = self._create_user("abc@def.xyz") other_key = PersonalAPIKey.objects.create( - label="Other test", user=other_user, secure_value=hash_key_value(generate_random_token_personal()) + label="Other test", + user=other_user, + secure_value=hash_key_value(generate_random_token_personal()), ) response = self.client.get(f"/api/personal_api_keys/{other_key.id}/") self.assertEqual(response.status_code, 404) @@ -113,7 +145,8 @@ def test_no_key(self): def test_header_resilient(self): response = self.client.get( - f"/api/projects/{self.team.id}/dashboards/", HTTP_AUTHORIZATION=f"Bearer {self.value} " + f"/api/projects/{self.team.id}/dashboards/", + HTTP_AUTHORIZATION=f"Bearer {self.value} ", ) self.assertEqual(response.status_code, 200) @@ -122,7 +155,10 @@ def test_query_string(self): self.assertEqual(response.status_code, 200) def test_body(self): - response = self.client.get(f"/api/projects/{self.team.id}/dashboards/", {"personal_api_key": self.value}) + response = self.client.get( + f"/api/projects/{self.team.id}/dashboards/", + {"personal_api_key": self.value}, + ) self.assertEqual(response.status_code, 200) def 
test_user_not_active(self): @@ -137,15 +173,19 @@ def test_user_endpoint(self): def test_does_not_interfere_with_temporary_token_auth(self): response = self.client.get( - f"/api/projects/{self.team.id}/dashboards/", HTTP_AUTHORIZATION=f"Bearer {self.value}" + f"/api/projects/{self.team.id}/dashboards/", + HTTP_AUTHORIZATION=f"Bearer {self.value}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) impersonated_access_token = encode_jwt( - {"id": self.user.id}, timedelta(minutes=15), PosthogJwtAudience.IMPERSONATED_USER + {"id": self.user.id}, + timedelta(minutes=15), + PosthogJwtAudience.IMPERSONATED_USER, ) response = self.client.get( - f"/api/projects/{self.team.id}/dashboards/", HTTP_AUTHORIZATION=f"Bearer {impersonated_access_token}" + f"/api/projects/{self.team.id}/dashboards/", + HTTP_AUTHORIZATION=f"Bearer {impersonated_access_token}", ) self.assertEqual(response.status_code, status.HTTP_200_OK) diff --git a/posthog/api/test/test_persons_trends.py b/posthog/api/test/test_persons_trends.py index fa1c6bed6e912..24ccbf2bc7838 100644 --- a/posthog/api/test/test_persons_trends.py +++ b/posthog/api/test/test_persons_trends.py @@ -5,7 +5,9 @@ from posthog.constants import ENTITY_ID, ENTITY_MATH, ENTITY_TYPE, TRENDS_CUMULATIVE from posthog.models import Action, ActionStep, Cohort, Organization -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -42,17 +44,31 @@ def _create_events(self, use_time=False): secondTeam = Organization.objects.bootstrap(None, team_fields={"api_token": "token456"})[2] freeze_without_time = ["2019-12-24", "2020-01-01", "2020-01-02"] - freeze_with_time = ["2019-12-24 03:45:34", "2020-01-01 00:06:34", "2020-01-02 16:34:34"] + freeze_with_time = [ + "2019-12-24 03:45:34", + "2020-01-01 00:06:34", + "2020-01-02 16:34:34", + ] 
freeze_args = freeze_without_time if use_time: freeze_args = freeze_with_time with freeze_time(freeze_args[0]): - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "value"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "value"}, + ) with freeze_time(freeze_args[1]): - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "value"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "value"}, + ) _create_event(team=self.team, event="sign up", distinct_id="anonymous_id") _create_event(team=self.team, event="sign up", distinct_id="blabla") with freeze_time(freeze_args[2]): @@ -60,13 +76,19 @@ def _create_events(self, use_time=False): team=self.team, event="sign up", distinct_id="blabla", - properties={"$some_property": "other_value", "$some_numerical_prop": 80}, + properties={ + "$some_property": "other_value", + "$some_numerical_prop": 80, + }, ) _create_event(team=self.team, event="no events", distinct_id="blabla") # second team should have no effect _create_event( - team=secondTeam, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"} + team=secondTeam, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "other_value"}, ) flush_persons_and_events() @@ -77,21 +99,30 @@ def test_people_cumulative(self): for i in range(20): _create_person(team_id=self.team.pk, distinct_ids=[f"blabla_{i}"]) _create_event( - team=self.team, event="sign up", distinct_id=f"blabla_{i}", properties={"$some_property": "value"} + team=self.team, + event="sign up", + distinct_id=f"blabla_{i}", + properties={"$some_property": "value"}, ) with freeze_time("2020-01-05 00:06:34"): for i in range(20, 40): _create_person(team_id=self.team.pk, distinct_ids=[f"blabla_{i}"]) _create_event( - team=self.team, event="sign up", 
distinct_id=f"blabla_{i}", properties={"$some_property": "value"} + team=self.team, + event="sign up", + distinct_id=f"blabla_{i}", + properties={"$some_property": "value"}, ) with freeze_time("2020-01-15 00:06:34"): for i in range(40, 80): _create_person(team_id=self.team.pk, distinct_ids=[f"blabla_{i}"]) _create_event( - team=self.team, event="sign up", distinct_id=f"blabla_{i}", properties={"$some_property": "value"} + team=self.team, + event="sign up", + distinct_id=f"blabla_{i}", + properties={"$some_property": "value"}, ) event_response = self.client.get( @@ -128,7 +159,12 @@ def _create_breakdown_events(self): with freeze_time(freeze_without_time[0]): for i in range(25): - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": i}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": i}, + ) flush_persons_and_events() @@ -136,12 +172,20 @@ def test_people_endpoint_paginated(self): for index in range(0, 150): _create_person(team_id=self.team.pk, distinct_ids=["person" + str(index)]) _create_event( - team=self.team, event="sign up", distinct_id="person" + str(index), timestamp="2020-01-04T12:00:00Z" + team=self.team, + event="sign up", + distinct_id="person" + str(index), + timestamp="2020-01-04T12:00:00Z", ) event_response = self.client.get( f"/api/projects/{self.team.id}/persons/trends/", - data={"date_from": "2020-01-04", "date_to": "2020-01-04", ENTITY_TYPE: "events", ENTITY_ID: "sign up"}, + data={ + "date_from": "2020-01-04", + "date_to": "2020-01-04", + ENTITY_TYPE: "events", + ENTITY_ID: "sign up", + }, ).json() self.assertEqual(len(event_response["results"][0]["people"]), 100) @@ -158,17 +202,57 @@ def _create_people_interval_events(self): person7 = _create_person(team_id=self.team.pk, distinct_ids=["person7"]) # solo - _create_event(team=self.team, event="sign up", distinct_id="person1", timestamp="2020-01-04T14:10:00Z") + _create_event( + 
team=self.team, + event="sign up", + distinct_id="person1", + timestamp="2020-01-04T14:10:00Z", + ) # group by hour - _create_event(team=self.team, event="sign up", distinct_id="person2", timestamp="2020-01-04T16:30:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person3", timestamp="2020-01-04T16:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="person2", + timestamp="2020-01-04T16:30:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person3", + timestamp="2020-01-04T16:50:00Z", + ) # group by min - _create_event(team=self.team, event="sign up", distinct_id="person4", timestamp="2020-01-04T19:20:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person5", timestamp="2020-01-04T19:20:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="person4", + timestamp="2020-01-04T19:20:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person5", + timestamp="2020-01-04T19:20:00Z", + ) # group by week and month - _create_event(team=self.team, event="sign up", distinct_id="person6", timestamp="2019-11-05T16:30:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person7", timestamp="2019-11-07T16:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person1", timestamp="2019-11-27T16:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="person6", + timestamp="2019-11-05T16:30:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person7", + timestamp="2019-11-07T16:50:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person1", + timestamp="2019-11-27T16:50:00Z", + ) flush_persons_and_events() return person1, person2, person3, person4, person5, person6, person7 @@ -176,11 +260,29 @@ def _create_people_interval_events(self): def test_hour_interval(self): sign_up_action, person = self._create_events() - person1, person2, person3, 
person4, person5, person6, person7 = self._create_people_interval_events() + ( + person1, + person2, + person3, + person4, + person5, + person6, + person7, + ) = self._create_people_interval_events() _create_person(team_id=self.team.pk, distinct_ids=["outside_range"]) - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-04T13:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-04T15:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-04T13:50:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-04T15:50:00Z", + ) # check solo hour action_response = self.client.get( f"/api/projects/{self.team.id}/persons/trends/", @@ -231,18 +333,40 @@ def test_hour_interval(self): self.assertListEqual(sorted(all_people_ids), sorted([str(person2.uuid), str(person3.uuid)])) self.assertEqual(len(all_people_ids), 2) self.assertEntityResponseEqual( - hour_grouped_action_response["results"], hour_grouped_grevent_response["results"], remove=[] + hour_grouped_action_response["results"], + hour_grouped_grevent_response["results"], + remove=[], ) def test_day_interval(self): sign_up_action, person = self._create_events() person1 = _create_person(team_id=self.team.pk, distinct_ids=["person1"]) _create_person(team_id=self.team.pk, distinct_ids=["person2"]) - _create_event(team=self.team, event="sign up", distinct_id="person1", timestamp="2020-01-04T12:00:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person2", timestamp="2020-01-05T12:00:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="person1", + timestamp="2020-01-04T12:00:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person2", + timestamp="2020-01-05T12:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["outside_range"]) - 
_create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-03T13:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-05T15:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-03T13:50:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-05T15:50:00Z", + ) # test people action_response = self.client.get( @@ -274,11 +398,31 @@ def test_day_interval_cumulative(self): sign_up_action, person = self._create_events() person1 = _create_person(team_id=self.team.pk, distinct_ids=["person1"]) person2 = _create_person(team_id=self.team.pk, distinct_ids=["person2"]) - _create_event(team=self.team, event="sign up", distinct_id="person1", timestamp="2020-01-03T12:00:00Z") - _create_event(team=self.team, event="sign up", distinct_id="person2", timestamp="2020-01-04T20:00:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="person1", + timestamp="2020-01-03T12:00:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="person2", + timestamp="2020-01-04T20:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["outside_range"]) - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-02T13:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-01-05T15:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-02T13:50:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-01-05T15:50:00Z", + ) # test people action_response = self.client.get( @@ -313,11 +457,29 @@ def test_day_interval_cumulative(self): def test_week_interval(self): sign_up_action, person = self._create_events() - person1, person2, person3, person4, 
person5, person6, person7 = self._create_people_interval_events() + ( + person1, + person2, + person3, + person4, + person5, + person6, + person7, + ) = self._create_people_interval_events() _create_person(team_id=self.team.pk, distinct_ids=["outside_range"]) - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2019-10-26T13:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-11-11T15:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2019-10-26T13:50:00Z", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2020-11-11T15:50:00Z", + ) # check grouped week week_grouped_action_response = self.client.get( f"/api/projects/{self.team.id}/persons/trends/", @@ -346,17 +508,37 @@ def test_week_interval(self): self.assertListEqual(sorted(all_people_ids), sorted([str(person6.uuid), str(person7.uuid)])) self.assertEntityResponseEqual( - week_grouped_action_response["results"], week_grouped_grevent_response["results"], remove=[] + week_grouped_action_response["results"], + week_grouped_grevent_response["results"], + remove=[], ) def test_month_interval(self): sign_up_action, person = self._create_events() - person1, person2, person3, person4, person5, person6, person7 = self._create_people_interval_events() + ( + person1, + person2, + person3, + person4, + person5, + person6, + person7, + ) = self._create_people_interval_events() _create_person(team_id=self.team.pk, distinct_ids=["outside_range"]) - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2019-12-01T13:50:00Z") - _create_event(team=self.team, event="sign up", distinct_id="outside_range", timestamp="2020-10-10T15:50:00Z") + _create_event( + team=self.team, + event="sign up", + distinct_id="outside_range", + timestamp="2019-12-01T13:50:00Z", + ) + _create_event( + team=self.team, + event="sign 
up", + distinct_id="outside_range", + timestamp="2020-10-10T15:50:00Z", + ) # check grouped month month_group_action_response = self.client.get( f"/api/projects/{self.team.id}/persons/trends/", @@ -381,14 +563,23 @@ def test_month_interval(self): all_people_ids = [str(person["id"]) for person in month_group_action_response["results"][0]["people"]] self.assertEqual(len(all_people_ids), 3) - self.assertListEqual(sorted(all_people_ids), sorted([str(person6.uuid), str(person7.uuid), str(person1.uuid)])) + self.assertListEqual( + sorted(all_people_ids), + sorted([str(person6.uuid), str(person7.uuid), str(person1.uuid)]), + ) self.assertEntityResponseEqual( - month_group_action_response["results"], month_group_grevent_response["results"], remove=[] + month_group_action_response["results"], + month_group_grevent_response["results"], + remove=[], ) def _create_multiple_people(self): - person1 = _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"name": "person1"}) + person1 = _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"name": "person1"}, + ) _create_event( team=self.team, event="watched movie", @@ -397,7 +588,11 @@ def _create_multiple_people(self): properties={"event_prop": "prop1"}, ) - person2 = _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"name": "person2"}) + person2 = _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"name": "person2"}, + ) _create_event( team=self.team, event="watched movie", @@ -421,7 +616,11 @@ def _create_multiple_people(self): properties={"event_prop": "prop1"}, ) - person3 = _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"name": "person3"}) + person3 = _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"name": "person3"}, + ) _create_event( team=self.team, event="watched movie", @@ -444,7 +643,11 @@ def _create_multiple_people(self): properties={"event_prop": 
"prop2"}, ) - person4 = _create_person(team_id=self.team.pk, distinct_ids=["person4"], properties={"name": "person4"}) + person4 = _create_person( + team_id=self.team.pk, + distinct_ids=["person4"], + properties={"name": "person4"}, + ) _create_event( team=self.team, event="watched movie", @@ -507,7 +710,10 @@ def test_breakdown_by_cohort_people_endpoint(self): _create_cohort( name="cohort3", team=self.team, - groups=[{"properties": {"name": "person1"}}, {"properties": {"name": "person2"}}], + groups=[ + {"properties": {"name": "person1"}}, + {"properties": {"name": "person2"}}, + ], ) _create_action(name="watched movie", team=self.team) @@ -760,7 +966,11 @@ def test_breakdown_by_event_property_none_people_endpoint(self): _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "p2"}) _create_event( - team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T12:00:00Z", properties={} + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-09T12:00:00Z", + properties={}, ) _create_event( team=self.team, @@ -803,7 +1013,12 @@ def test_breakdown_by_event_property_none_people_endpoint(self): @snapshot_clickhouse_queries def test_trends_people_endpoint_includes_recordings(self): _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={}) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-09T14:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-09T14:00:00Z", + ) _create_event( event_uuid="693402ed-590e-4737-ba26-93ebf18121bd", team=self.team, @@ -853,11 +1068,29 @@ def test_trends_people_endpoint_includes_recordings(self): @snapshot_clickhouse_queries def test_trends_people_endpoint_filters_search(self): - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"email": "ben@posthog.com"}) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-09T14:00:00Z") + 
_create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"email": "ben@posthog.com"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-09T14:00:00Z", + ) - _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"email": "neil@posthog.com"}) - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T14:00:00Z") + _create_person( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"email": "neil@posthog.com"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-09T14:00:00Z", + ) params = { "date_from": "2020-01-08", diff --git a/posthog/api/test/test_plugin.py b/posthog/api/test/test_plugin.py index 945b4f8051e50..ea8021975daa8 100644 --- a/posthog/api/test/test_plugin.py +++ b/posthog/api/test/test_plugin.py @@ -61,18 +61,27 @@ def assert_plugin_activity(self, expected: List[Dict]): def test_create_plugin_auth(self, mock_get, mock_reload): repo_url = "https://github.com/PostHog/helloworldplugin" - for level in (Organization.PluginsAccessLevel.NONE, Organization.PluginsAccessLevel.CONFIG): + for level in ( + Organization.PluginsAccessLevel.NONE, + Organization.PluginsAccessLevel.CONFIG, + ): self.organization.plugins_access_level = level self.organization.save() response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url}) self.assertEqual( - response.status_code, 403, "Did not reject plugin installation as non-install org properly" + response.status_code, + 403, + "Did not reject plugin installation as non-install org properly", ) self.organization.plugins_access_level = Organization.PluginsAccessLevel.INSTALL self.organization.save() response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url}) - self.assertEqual(response.status_code, 201, "Did not manage to install plugin properly despite install access") + self.assertEqual( + 
response.status_code, + 201, + "Did not manage to install plugin properly despite install access", + ) self.assert_plugin_activity( [ @@ -94,7 +103,11 @@ def test_create_plugin_auth(self, mock_get, mock_reload): ) response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url}) - self.assertEqual(response.status_code, 400, "Did not reject already installed plugin properly") + self.assertEqual( + response.status_code, + 400, + "Did not reject already installed plugin properly", + ) def test_create_plugin_auth_globally_managed(self, mock_get, mock_reload): repo_url = "https://github.com/PostHog/helloworldplugin" @@ -106,7 +119,10 @@ def test_create_plugin_auth_globally_managed(self, mock_get, mock_reload): ): self.organization.plugins_access_level = level self.organization.save() - response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url, "is_global": True}) + response = self.client.post( + "/api/organizations/@current/plugins/", + {"url": repo_url, "is_global": True}, + ) self.assertEqual( response.status_code, 403, @@ -117,11 +133,17 @@ def test_create_plugin_auth_globally_managed(self, mock_get, mock_reload): self.organization.save() response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url, "is_global": True}) self.assertEqual( - response.status_code, 201, "Did not manage to install globally managed plugin properly despite root access" + response.status_code, + 201, + "Did not manage to install globally managed plugin properly despite root access", ) response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url, "is_global": True}) - self.assertEqual(response.status_code, 400, "Did not reject already installed plugin properly") + self.assertEqual( + response.status_code, + 400, + "Did not reject already installed plugin properly", + ) def test_globally_managed_visible_to_all_orgs(self, mock_get, mock_reload): my_org = self.organization @@ -132,16 +154,22 @@ def 
test_globally_managed_visible_to_all_orgs(self, mock_get, mock_reload): repo_url = "https://github.com/PostHog/helloworldplugin" install_response = self.client.post(f"/api/organizations/{my_org.id}/plugins/", {"url": repo_url}) - self.assertEqual(install_response.status_code, 201, "Did not manage to install plugin properly") + self.assertEqual( + install_response.status_code, + 201, + "Did not manage to install plugin properly", + ) # The plugin is NOT global and should only show up for my org list_response_other_org_1 = self.client.get(f"/api/organizations/{other_org.id}/plugins/") self.assertDictEqual( - list_response_other_org_1.json(), {"count": 0, "next": None, "previous": None, "results": []} + list_response_other_org_1.json(), + {"count": 0, "next": None, "previous": None, "results": []}, ) self.assertEqual(list_response_other_org_1.status_code, 200) # Let's make the plugin global update_response_my_org = self.client.patch( - f"/api/organizations/{my_org.id}/plugins/{install_response.json()['id']}/", {"is_global": True} + f"/api/organizations/{my_org.id}/plugins/{install_response.json()['id']}/", + {"is_global": True}, ) self.assertEqual(update_response_my_org.status_code, 200) # Now the plugin is global and should show up for other org @@ -155,7 +183,8 @@ def test_no_longer_globally_managed_still_visible_to_org_iff_has_config(self, mo name="FooBar2", plugins_access_level=Organization.PluginsAccessLevel.CONFIG ) no_plugins_org: Organization = Organization.objects.create( - name="NoPlugins", plugins_access_level=Organization.PluginsAccessLevel.CONFIG + name="NoPlugins", + plugins_access_level=Organization.PluginsAccessLevel.CONFIG, ) other_team: Team = Team.objects.create(organization=other_org, name="FooBar2") OrganizationMembership.objects.create(user=self.user, organization=other_org) @@ -181,22 +210,27 @@ def test_globally_managed_only_manageable_by_owner_org(self, mock_get, mock_relo repo_url = "https://github.com/PostHog/helloworldplugin" 
install_response = self.client.post( - f"/api/organizations/{my_org.id}/plugins/", {"url": repo_url, "is_global": True} + f"/api/organizations/{my_org.id}/plugins/", + {"url": repo_url, "is_global": True}, ) self.assertEqual( - install_response.status_code, 201, "Did not manage to install globally managed plugin properly" + install_response.status_code, + 201, + "Did not manage to install globally managed plugin properly", ) # My org patch_response_other_org_1 = self.client.patch( - f"/api/organizations/{my_org.id}/plugins/{install_response.json()['id']}", {"description": "X"} + f"/api/organizations/{my_org.id}/plugins/{install_response.json()['id']}", + {"description": "X"}, ) self.assertEqual(patch_response_other_org_1.status_code, 200) self.assertEqual("X", patch_response_other_org_1.json().get("description")) # Other org patch_response_other_org_2 = self.client.patch( - f"/api/organizations/{other_org.id}/plugins/{install_response.json()['id']}", {"description": "Y"} + f"/api/organizations/{other_org.id}/plugins/{install_response.json()['id']}", + {"description": "Y"}, ) self.assertEqual(patch_response_other_org_2.status_code, 403) self.assertIn( @@ -210,51 +244,73 @@ def test_update_plugin_auth_to_globally_managed(self, mock_get, mock_reload): self.assertEqual(install_response.status_code, 201) for is_global in (True, False): - for level in (Organization.PluginsAccessLevel.NONE, Organization.PluginsAccessLevel.CONFIG): + for level in ( + Organization.PluginsAccessLevel.NONE, + Organization.PluginsAccessLevel.CONFIG, + ): self.organization.plugins_access_level = level self.organization.save() response = self.client.patch( - f"/api/organizations/@current/plugins/{install_response.json()['id']}/", {"is_global": is_global} + f"/api/organizations/@current/plugins/{install_response.json()['id']}/", + {"is_global": is_global}, ) self.assertEqual( - response.status_code, 403, "Plugin was not 403 for org despite it having no plugin install access" + 
response.status_code, + 403, + "Plugin was not 403 for org despite it having no plugin install access", ) self.organization.plugins_access_level = Organization.PluginsAccessLevel.INSTALL self.organization.save() for is_global in (True, False): response = self.client.patch( - f"/api/organizations/@current/plugins/{install_response.json()['id']}/", {"is_global": is_global} + f"/api/organizations/@current/plugins/{install_response.json()['id']}/", + {"is_global": is_global}, ) self.assertEqual( - response.status_code, 403, "Did not reject globally managed plugin update as non-root org properly" + response.status_code, + 403, + "Did not reject globally managed plugin update as non-root org properly", ) self.organization.plugins_access_level = Organization.PluginsAccessLevel.ROOT self.organization.save() for is_global in (True, False): response = self.client.patch( - f"/api/organizations/@current/plugins/{install_response.json()['id']}/", {"is_global": is_global} + f"/api/organizations/@current/plugins/{install_response.json()['id']}/", + {"is_global": is_global}, ) self.assertEqual( - response.status_code, 200, "Did not manage to make plugin globally managed properly despite root access" + response.status_code, + 200, + "Did not manage to make plugin globally managed properly despite root access", ) def test_plugin_private_token_url_unique(self, mock_get, mock_reload): repo_url = "https://gitlab.com/mariusandra/helloworldplugin" - response = self.client.post("/api/organizations/@current/plugins/", {"url": f"{repo_url}?private_token=123"}) + response = self.client.post( + "/api/organizations/@current/plugins/", + {"url": f"{repo_url}?private_token=123"}, + ) self.assertEqual(response.status_code, 201) - response = self.client.post("/api/organizations/@current/plugins/", {"url": f"{repo_url}?private_token=123"}) + response = self.client.post( + "/api/organizations/@current/plugins/", + {"url": f"{repo_url}?private_token=123"}, + ) self.assertEqual(response.status_code, 
400) response = self.client.post("/api/organizations/@current/plugins/", {"url": repo_url}) self.assertEqual(response.status_code, 400) - response = self.client.post("/api/organizations/@current/plugins/", {"url": f"{repo_url}?private_token=567"}) + response = self.client.post( + "/api/organizations/@current/plugins/", + {"url": f"{repo_url}?private_token=567"}, + ) self.assertEqual(response.status_code, 400) response = self.client.post("/api/organizations/@current/plugins/", {"url": f"{repo_url}-other"}) self.assertEqual(response.status_code, 201) response = self.client.post( - "/api/organizations/@current/plugins/", {"url": f"{repo_url}-other?private_token=567"} + "/api/organizations/@current/plugins/", + {"url": f"{repo_url}-other?private_token=567"}, ) self.assertEqual(response.status_code, 400) @@ -280,7 +336,10 @@ def test_update_plugin_auth(self, mock_sync_from_plugin_archive, mock_get, mock_ plugin.refresh_from_db() self.assertEqual(plugin.updated_at, fake_date) - for level in (Organization.PluginsAccessLevel.NONE, Organization.PluginsAccessLevel.CONFIG): + for level in ( + Organization.PluginsAccessLevel.NONE, + Organization.PluginsAccessLevel.CONFIG, + ): self.organization.plugins_access_level = level self.organization.save() response = self.client.post(api_url, {"url": repo_url}) @@ -297,7 +356,10 @@ def test_delete_plugin_auth(self, mock_get, mock_reload): api_url = "/api/organizations/@current/plugins/{}".format(response.json()["id"]) - for level in (Organization.PluginsAccessLevel.NONE, Organization.PluginsAccessLevel.CONFIG): + for level in ( + Organization.PluginsAccessLevel.NONE, + Organization.PluginsAccessLevel.CONFIG, + ): self.organization.plugins_access_level = level self.organization.save() response = self.client.delete(api_url) @@ -361,7 +423,10 @@ def test_cannot_delete_of_other_orgs_plugin(self, mock_get, mock_reload): def test_cannot_delete_global_plugin(self, mock_get, mock_reload): repo_url = 
"https://github.com/PostHog/helloworldplugin" - response = self.client.post(f"/api/organizations/@current/plugins/", {"url": repo_url, "is_global": True}) + response = self.client.post( + f"/api/organizations/@current/plugins/", + {"url": repo_url, "is_global": True}, + ) self.assertEqual(response.status_code, 201) @@ -370,13 +435,15 @@ def test_cannot_delete_global_plugin(self, mock_get, mock_reload): self.assertEqual(response.status_code, 400) self.assertEqual( - response.json().get("detail"), "This plugin is marked as global! Make it local before uninstallation" + response.json().get("detail"), + "This plugin is marked as global! Make it local before uninstallation", ) def test_create_plugin_repo_url(self, mock_get, mock_reload): self.assertEqual(mock_reload.call_count, 0) response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 201) self.assertEqual( @@ -389,7 +456,12 @@ def test_create_plugin_repo_url(self, mock_get, mock_reload): "url": "https://github.com/PostHog/helloworldplugin", "icon": None, "config_schema": { - "bar": {"name": "What's in the bar?", "type": "string", "default": "baz", "required": False} + "bar": { + "name": "What's in the bar?", + "type": "string", + "default": "baz", + "required": False, + } }, "tag": HELLO_WORLD_PLUGIN_GITHUB_ZIP[0], "latest_tag": HELLO_WORLD_PLUGIN_GITHUB_ZIP[0], @@ -428,7 +500,12 @@ def test_create_plugin_commit_url(self, mock_get, mock_reload): "url": f"https://github.com/PostHog/helloworldplugin/commit/{HELLO_WORLD_PLUGIN_GITHUB_ZIP[0]}", "icon": None, "config_schema": { - "bar": {"name": "What's in the bar?", "type": "string", "default": "baz", "required": False} + "bar": { + "name": "What's in the bar?", + "type": "string", + "default": "baz", + "required": False, + } }, "tag": 
HELLO_WORLD_PLUGIN_GITHUB_ZIP[0], "latest_tag": HELLO_WORLD_PLUGIN_GITHUB_ZIP[0], @@ -464,8 +541,17 @@ def test_create_plugin_other_commit_url(self, mock_get, mock_reload): "url": f"https://github.com/PostHog/helloworldplugin/commit/{HELLO_WORLD_PLUGIN_GITHUB_ATTACHMENT_ZIP[0]}", "icon": None, "config_schema": { - "bar": {"name": "What's in the bar?", "type": "string", "default": "baz", "required": False}, - "foodb": {"name": "Upload your database", "type": "attachment", "required": False}, + "bar": { + "name": "What's in the bar?", + "type": "string", + "default": "baz", + "required": False, + }, + "foodb": { + "name": "Upload your database", + "type": "attachment", + "required": False, + }, }, "tag": HELLO_WORLD_PLUGIN_GITHUB_ATTACHMENT_ZIP[0], "latest_tag": HELLO_WORLD_PLUGIN_GITHUB_ATTACHMENT_ZIP[0], @@ -554,7 +640,8 @@ def test_create_plugin_version_range_lt_invalid(self, mock_get, mock_reload): ) self.assertEqual(response.status_code, 400) self.assertEqual( - cast(Dict[str, str], response.json())["detail"], 'Invalid PostHog semantic version requirement "< ..."!' 
+ cast(Dict[str, str], response.json())["detail"], + 'Invalid PostHog semantic version requirement "< ..."!', ) def test_create_plugin_version_range_gt_next_major_ignore_on_cloud(self, mock_get, mock_reload): @@ -571,7 +658,8 @@ def test_update_plugin_source(self, mock_get, mock_reload): # Create the plugin self.assertEqual(mock_reload.call_count, 0) response = self.client.post( - "/api/organizations/@current/plugins/", {"plugin_type": "source", "name": "myplugin_original"} + "/api/organizations/@current/plugins/", + {"plugin_type": "source", "name": "myplugin_original"}, ) plugin_id = response.json()["id"] self.assertEqual(mock_reload.call_count, 0) @@ -587,7 +675,10 @@ def test_update_plugin_source(self, mock_get, mock_reload): data=json.dumps({"index.ts": "'hello world'", "plugin.json": '{"name":"my plugin"}'}), content_type="application/json", ) - self.assertEqual(response.json(), {"index.ts": "'hello world'", "plugin.json": '{"name":"my plugin"}'}) + self.assertEqual( + response.json(), + {"index.ts": "'hello world'", "plugin.json": '{"name":"my plugin"}'}, + ) self.assertEqual(Plugin.objects.get(pk=plugin_id).name, "my plugin") self.assertEqual(mock_reload.call_count, 1) @@ -597,7 +688,10 @@ def test_update_plugin_source(self, mock_get, mock_reload): data=json.dumps({"index.ts": "'hello again'"}), content_type="application/json", ) - self.assertEqual(response.json(), {"index.ts": "'hello again'", "plugin.json": '{"name":"my plugin"}'}) + self.assertEqual( + response.json(), + {"index.ts": "'hello again'", "plugin.json": '{"name":"my plugin"}'}, + ) self.assertEqual(mock_reload.call_count, 2) # Deleting a file by passing `None` @@ -612,7 +706,8 @@ def test_update_plugin_source(self, mock_get, mock_reload): def test_create_plugin_frontend_source(self, mock_get, mock_reload): self.assertEqual(mock_reload.call_count, 0) response = self.client.post( - "/api/organizations/@current/plugins/", {"plugin_type": "source", "name": "myplugin"} + 
"/api/organizations/@current/plugins/", + {"plugin_type": "source", "name": "myplugin"}, ) self.assertEqual(response.status_code, 201) id = response.json()["id"] @@ -640,7 +735,8 @@ def test_create_plugin_frontend_source(self, mock_get, mock_reload): self.assertEqual(mock_reload.call_count, 0) response = self.client.patch( - f"/api/organizations/@current/plugins/{id}/update_source", {"frontend.tsx": "export const scene = {}"} + f"/api/organizations/@current/plugins/{id}/update_source", + {"frontend.tsx": "export const scene = {}"}, ) self.assertEqual(Plugin.objects.count(), 1) @@ -653,7 +749,10 @@ def test_create_plugin_frontend_source(self, mock_get, mock_reload): # no frontend, since no pluginserver transpiles the code response = self.client.get(f"/api/plugin_config/{plugin_config.id}/frontend") self.assertEqual(response.status_code, 200) - self.assertEqual(response.content, b'export function getFrontendApp () { return {"transpiling": true} }') + self.assertEqual( + response.content, + b'export function getFrontendApp () { return {"transpiling": true} }', + ) # mock the plugin server's transpilation plugin_source = PluginSourceFile.objects.get(plugin_id=id) @@ -682,7 +781,10 @@ def test_create_plugin_frontend_source(self, mock_get, mock_reload): # And reply that it's transpiling response = self.client.get(f"/api/plugin_config/{plugin_config.id}/frontend") self.assertEqual(response.status_code, 200) - self.assertEqual(response.content, b'export function getFrontendApp () { return {"transpiling": true} }') + self.assertEqual( + response.content, + b'export function getFrontendApp () { return {"transpiling": true} }', + ) def test_plugin_repository(self, mock_get, mock_reload): response = self.client.get("/api/organizations/@current/plugins/repository/") @@ -732,7 +834,8 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload): fake_date = datetime(2022, 1, 1, 0, 0).replace(tzinfo=ZoneInfo("UTC")) with freeze_time(fake_date.isoformat()): response = 
self.client.post( - f"/api/organizations/{my_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + f"/api/organizations/{my_org.id}/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 201) self.assertEqual(Plugin.objects.count(), 1) @@ -741,14 +844,16 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload): self.assertEqual(plugin.updated_at, fake_date) response = self.client.post( - f"/api/organizations/{my_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + f"/api/organizations/{my_org.id}/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 400) self.assertEqual(Plugin.objects.count(), 1) # try to save it for another org response = self.client.post( - f"/api/organizations/{other_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + f"/api/organizations/{other_org.id}/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 403) self.assertEqual(Plugin.objects.count(), 1) @@ -756,12 +861,14 @@ def test_install_plugin_on_multiple_orgs(self, mock_get, mock_reload): self.user.join(organization=other_org, level=OrganizationMembership.Level.OWNER) response = self.client.post( - f"/api/organizations/{other_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + f"/api/organizations/{other_org.id}/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 201) self.assertEqual(Plugin.objects.count(), 2) response = self.client.post( - f"/api/organizations/{other_org.id}/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + f"/api/organizations/{other_org.id}/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 400) self.assertEqual(Plugin.objects.count(), 2) @@ -800,13 +907,22 @@ def 
test_listing_plugins_is_not_nplus1(self, _mock_get, _mock_reload) -> None: def _assert_number_of_when_listed_plugins(self, expected_plugins_count: int) -> None: response_with_none = self.client.get(f"/api/organizations/@current/plugins/") self.assertEqual(response_with_none.status_code, 200) - self.assertEqual(response_with_none.json()["count"], expected_plugins_count, response_with_none.json()) - self.assertEqual(len(response_with_none.json()["results"]), expected_plugins_count, response_with_none.json()) + self.assertEqual( + response_with_none.json()["count"], + expected_plugins_count, + response_with_none.json(), + ) + self.assertEqual( + len(response_with_none.json()["results"]), + expected_plugins_count, + response_with_none.json(), + ) def test_create_plugin_config(self, mock_get, mock_reload): self.assertEqual(mock_reload.call_count, 0) response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 201) self.assertEqual(Plugin.objects.count(), 1) @@ -814,7 +930,12 @@ def test_create_plugin_config(self, mock_get, mock_reload): plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) self.assertEqual(response.status_code, 201, response.content) @@ -842,7 +963,12 @@ def test_create_plugin_config(self, mock_get, mock_reload): # If we're trying to create another plugin config for the same plugin, just return the original response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + 
"order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) self.assertEqual(response.json()["id"], plugin_config_id) @@ -875,7 +1001,8 @@ def test_create_plugin_config(self, mock_get, mock_reload): def test_create_plugin_config_auth(self, mock_get, mock_reload): response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) plugin_id = response.json()["id"] @@ -883,7 +1010,12 @@ def test_create_plugin_config_auth(self, mock_get, mock_reload): self.organization.save() response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) self.assertEqual(response.status_code, 400) @@ -897,19 +1029,30 @@ def test_create_plugin_config_auth(self, mock_get, mock_reload): self.organization.save() response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) self.assertEqual(response.status_code, 201) def test_update_plugin_config_auth(self, mock_get, mock_reload): response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, 
format="multipart", ) plugin_config_id = response.json()["id"] @@ -958,12 +1101,18 @@ def test_update_plugin_config_no_longer_globally_managed_but_still_enabled(self, def test_delete_plugin_config_auth(self, mock_get, mock_reload): response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) plugin_config_id = response.json()["id"] @@ -1036,7 +1185,9 @@ def test_plugin_config_attachment(self, mock_get, mock_reload): ) response = self.client.patch( - f"/api/plugin_config/{plugin_config_id}", {"add_attachment[foodb]": tmp_file_2}, format="multipart" + f"/api/plugin_config/{plugin_config_id}", + {"add_attachment[foodb]": tmp_file_2}, + format="multipart", ) self.assertEqual(PluginAttachment.objects.count(), 1) @@ -1055,7 +1206,9 @@ def test_plugin_config_attachment(self, mock_get, mock_reload): ) response = self.client.patch( - f"/api/plugin_config/{plugin_config_id}", {"remove_attachment[foodb]": True}, format="multipart" + f"/api/plugin_config/{plugin_config_id}", + {"remove_attachment[foodb]": True}, + format="multipart", ) self.assertEqual(response.json()["config"], {"bar": "moop"}) self.assertEqual(PluginAttachment.objects.count(), 0) @@ -1131,7 +1284,12 @@ def test_create_plugin_config_with_secrets(self, mock_get, mock_reload): plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "very secret value"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": 
json.dumps({"bar": "very secret value"}), + }, format="multipart", ) plugin_config_id = response.json()["id"] @@ -1182,7 +1340,11 @@ def test_create_plugin_config_with_secrets(self, mock_get, mock_reload): # Test that secret values are updated but never revealed response = self.client.patch( f"/api/plugin_config/{plugin_config_id}", - {"enabled": False, "order": 1, "config": json.dumps({"bar": "a new very secret value"})}, + { + "enabled": False, + "order": 1, + "config": json.dumps({"bar": "a new very secret value"}), + }, format="multipart", ) self.assertEqual(Plugin.objects.count(), 1) @@ -1255,12 +1417,18 @@ def test_plugin_config_list(self, mock_get, mock_reload): @patch("posthog.api.plugin.connections") def test_job_trigger(self, db_connections, mock_validate_plugin_job_payload, mock_get, mock_reload): response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, format="multipart", ) plugin_config_id = response.json()["id"] @@ -1289,7 +1457,8 @@ def test_job_trigger(self, db_connections, mock_validate_plugin_job_payload, moc def test_check_for_updates_plugins_reload_not_called(self, _, mock_reload): response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(mock_reload.call_count, 1) @@ -1313,14 +1482,20 @@ def test_get_all_activity(self, _, mock_reload): self.organization.plugins_access_level = 
Organization.PluginsAccessLevel.INSTALL self.organization.save() response = self.client.post( - "/api/organizations/@current/plugins/", {"url": "https://github.com/PostHog/helloworldplugin"} + "/api/organizations/@current/plugins/", + {"url": "https://github.com/PostHog/helloworldplugin"}, ) self.assertEqual(response.status_code, 201) plugin_id = response.json()["id"] response = self.client.post( "/api/plugin_config/", - {"plugin": plugin_id, "enabled": True, "order": 0, "config": json.dumps({"bar": "moop"})}, + { + "plugin": plugin_id, + "enabled": True, + "order": 0, + "config": json.dumps({"bar": "moop"}), + }, ) plugin_config_id = response.json()["id"] diff --git a/posthog/api/test/test_preflight.py b/posthog/api/test/test_preflight.py index 50c018a2ea0a8..f277580857099 100644 --- a/posthog/api/test/test_preflight.py +++ b/posthog/api/test/test_preflight.py @@ -5,7 +5,10 @@ from django.utils import timezone from rest_framework import status -from posthog.cloud_utils import TEST_clear_cloud_cache, TEST_clear_instance_license_cache +from posthog.cloud_utils import ( + TEST_clear_cloud_cache, + TEST_clear_instance_license_cache, +) from posthog.models.instance_setting import set_instance_setting from posthog.models.organization import Organization, OrganizationInvite from posthog.test.base import APIBaseTest, QueryMatchingTest, snapshot_postgres_queries @@ -31,7 +34,11 @@ def preflight_dict(self, options={}): "kafka": True, "realm": "hosted-clickhouse", "region": None, - "available_social_auth_providers": {"google-oauth2": False, "github": False, "gitlab": False}, + "available_social_auth_providers": { + "google-oauth2": False, + "github": False, + "gitlab": False, + }, "can_create_org": False, "email_service_available": False, "slack_service": {"available": False, "client_id": None}, @@ -95,7 +102,10 @@ def test_preflight_request_with_object_storage_available(self, patched_s3_client response = response.json() available_timezones = cast(dict, 
response).pop("available_timezones") - self.assertEqual(response, self.preflight_authenticated_dict({"object_storage": True})) + self.assertEqual( + response, + self.preflight_authenticated_dict({"object_storage": True}), + ) self.assertDictContainsSubset({"Europe/Moscow": 3, "UTC": 0}, available_timezones) @pytest.mark.ee @@ -115,7 +125,10 @@ def test_cloud_preflight_request_unauthenticated(self): self.preflight_dict( { "email_service_available": True, - "slack_service": {"available": True, "client_id": "slack-client-id"}, + "slack_service": { + "available": True, + "client_id": "slack-client-id", + }, "can_create_org": True, "cloud": True, "realm": "cloud", @@ -142,7 +155,10 @@ def test_cloud_preflight_request(self): "cloud": True, "realm": "cloud", "region": "US", - "instance_preferences": {"debug_queries": False, "disable_paid_fs": False}, + "instance_preferences": { + "debug_queries": False, + "disable_paid_fs": False, + }, "site_url": "https://app.posthog.com", "email_service_available": True, "object_storage": True, @@ -184,7 +200,10 @@ def test_cloud_preflight_request_with_social_auth_providers(self): "cloud": True, "realm": "cloud", "region": "US", - "instance_preferences": {"debug_queries": False, "disable_paid_fs": True}, + "instance_preferences": { + "debug_queries": False, + "disable_paid_fs": True, + }, "site_url": "http://localhost:8000", "available_social_auth_providers": { "google-oauth2": True, @@ -206,7 +225,10 @@ def test_demo(self): response = self.client.get("/_preflight/") self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual(response.json(), self.preflight_dict({"demo": True, "can_create_org": True, "realm": "demo"})) + self.assertEqual( + response.json(), + self.preflight_dict({"demo": True, "can_create_org": True, "realm": "demo"}), + ) @pytest.mark.ee @pytest.mark.skip_on_multitenancy @@ -254,7 +276,9 @@ def test_can_create_org_with_multi_org(self): pass else: super(LicenseManager, cast(LicenseManager, 
License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) TEST_clear_instance_license_cache() with self.settings(MULTI_ORG_ENABLED=True): @@ -271,7 +295,9 @@ def test_cloud_preflight_based_on_license(self): pass else: super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key::123", plan="cloud", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key::123", + plan="cloud", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) response = self.client.get("/_preflight/") diff --git a/posthog/api/test/test_prompt.py b/posthog/api/test/test_prompt.py index 8c97de5e6cf08..7a56060eb712d 100644 --- a/posthog/api/test/test_prompt.py +++ b/posthog/api/test/test_prompt.py @@ -7,7 +7,6 @@ def _setup_prompts() -> None: - prompt1 = Prompt.objects.create( step=0, type="tooltip", @@ -159,7 +158,12 @@ def test_webhook_invalid_data(self): "text": "We have prepared a list of suggestions and resources to improve your experience with the tool. 
You can access it at any time by clicking on the question mark icon in the top right corner of the screen, and then selecting 'How to be successful with PostHog'.", "placement": "bottom-start", "reference": "help-button", - "buttons": [{"action": "activation-checklist", "label": "Show me suggestions"}], + "buttons": [ + { + "action": "activation-checklist", + "label": "Show me suggestions", + } + ], } ], }, diff --git a/posthog/api/test/test_properties_timeline.py b/posthog/api/test/test_properties_timeline.py index 1bb6835501c13..5243151c27e09 100644 --- a/posthog/api/test/test_properties_timeline.py +++ b/posthog/api/test/test_properties_timeline.py @@ -8,7 +8,9 @@ from posthog.models.filters.mixins.base import BreakdownType from posthog.models.group.util import create_group -from posthog.queries.properties_timeline.properties_timeline import PropertiesTimelineResult +from posthog.queries.properties_timeline.properties_timeline import ( + PropertiesTimelineResult, +) from posthog.settings.dynamic_settings import CONSTANCE_CONFIG from posthog.test.base import ( APIBaseTest, @@ -34,12 +36,18 @@ def _create_actor(self, properties: dict) -> str: """Create actor of relevant type and return its UUID (for persons) or key (for groups).""" if actor_type == "person": person = _create_person( - team=self.team, distinct_ids=["abcd"], uuid=main_actor_id, properties=properties + team=self.team, + distinct_ids=["abcd"], + uuid=main_actor_id, + properties=properties, ) return str(person.uuid) else: group = create_group( - team_id=self.team.pk, group_type_index=0, group_key=str(main_actor_id), properties=properties + team_id=self.team.pk, + group_type_index=0, + group_key=str(main_actor_id), + properties=properties, ) return group.group_key @@ -282,7 +290,10 @@ def test_timeline_for_existing_actor_with_three_events_and_return_to_previous_va ) self._create_event( event="$pageview", - actor_properties={"foo": "abc", "bar": 456}, # Changed bar back to initial value + 
actor_properties={ + "foo": "abc", + "bar": 456, + }, # Changed bar back to initial value timestamp="2020-01-04T00:00:00Z", ) flush_persons_and_events() @@ -340,7 +351,10 @@ def test_timeline_for_existing_person_with_three_events_and_return_to_previous_v ) self._create_event( event="$pageview", - actor_properties={"foo": "abc", "bar": 456}, # Changed bar back to initial value + actor_properties={ + "foo": "abc", + "bar": 456, + }, # Changed bar back to initial value timestamp="2020-01-02T14:00:00Z", ) flush_persons_and_events() @@ -402,7 +416,10 @@ def test_timeline_for_existing_person_with_three_events_and_return_to_previous_v ) self._create_event( event="$pageview", - actor_properties={"foo": "abc", "bar": 456}, # Changed bar back to initial value + actor_properties={ + "foo": "abc", + "bar": 456, + }, # Changed bar back to initial value timestamp="2020-01-02T00:40:00Z", ) flush_persons_and_events() @@ -464,7 +481,10 @@ def test_timeline_for_existing_person_with_three_events_and_return_to_previous_v ) self._create_event( event="$pageview", - actor_properties={"foo": "abc", "bar": 456}, # Changed bar back to initial value + actor_properties={ + "foo": "abc", + "bar": 456, + }, # Changed bar back to initial value timestamp="2020-01-31T00:40:00Z", ) flush_persons_and_events() @@ -526,7 +546,10 @@ def test_timeline_for_existing_person_with_three_events_and_return_to_previous_v ) self._create_event( event="$pageview", - actor_properties={"foo": "abc", "bar": 456}, # Changed bar back to initial value + actor_properties={ + "foo": "abc", + "bar": 456, + }, # Changed bar back to initial value timestamp="2020-01-06T00:40:00Z", ) flush_persons_and_events() diff --git a/posthog/api/test/test_property_definition.py b/posthog/api/test/test_property_definition.py index 2bdf088e88256..512f43ce92d0c 100644 --- a/posthog/api/test/test_property_definition.py +++ b/posthog/api/test/test_property_definition.py @@ -4,12 +4,18 @@ from rest_framework import status from 
posthog.api.property_definition import PropertyDefinitionQuerySerializer -from posthog.models import EventDefinition, EventProperty, Organization, PropertyDefinition, Team, ActivityLog +from posthog.models import ( + EventDefinition, + EventProperty, + Organization, + PropertyDefinition, + Team, + ActivityLog, +) from posthog.test.base import APIBaseTest, BaseTest class TestPropertyDefinitionAPI(APIBaseTest): - EXPECTED_PROPERTY_DEFINITIONS: List[Dict[str, Union[str, Optional[int], bool]]] = [ {"name": "$browser", "is_numerical": False}, {"name": "$current_url", "is_numerical": False}, @@ -62,7 +68,10 @@ def test_list_property_definitions(self): self.assertEqual(len(response.json()["results"]), len(self.EXPECTED_PROPERTY_DEFINITIONS)) for item in self.EXPECTED_PROPERTY_DEFINITIONS: - response_item: Dict = next((_i for _i in response.json()["results"] if _i["name"] == item["name"]), {}) + response_item: Dict = next( + (_i for _i in response.json()["results"] if _i["name"] == item["name"]), + {}, + ) self.assertEqual(response_item["is_numerical"], item["is_numerical"]) def test_list_numerical_property_definitions(self): @@ -87,7 +96,9 @@ def test_pagination_of_property_definitions(self): self.assertEqual(len(response.json()["results"]), 100) # Default page size self.assertEqual(response.json()["results"][0]["name"], "$browser") self.assertEqual( - response.json()["results"][1]["name"], "$current_url", [r["name"] for r in response.json()["results"]] + response.json()["results"][1]["name"], + "$current_url", + [r["name"] for r in response.json()["results"]], ) property_checkpoints = [ @@ -105,7 +116,10 @@ def test_pagination_of_property_definitions(self): self.assertEqual( len(response.json()["results"]), 100 if i < 2 else 10 ) # Each page has 100 except the last one - self.assertEqual(response.json()["results"][0]["name"], f"z_property_{property_checkpoints[i]}") + self.assertEqual( + response.json()["results"][0]["name"], + f"z_property_{property_checkpoints[i]}", + 
) def test_cant_see_property_definitions_for_another_team(self): org = Organization.objects.create(name="Separate Org") @@ -145,7 +159,10 @@ def test_query_property_definitions(self): response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?search=firs") self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() - assert [r["name"] for r in response_data["results"]] == ["first_visit", "is_first_movie"] + assert [r["name"] for r in response_data["results"]] == [ + "first_visit", + "is_first_movie", + ] # Fuzzy search response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?search=p ting") @@ -207,7 +224,10 @@ def test_query_property_definitions(self): def test_is_event_property_filter(self): response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?search=firs") self.assertEqual(response.status_code, status.HTTP_200_OK) - assert [r["name"] for r in response.json()["results"]] == ["first_visit", "is_first_movie"] + assert [r["name"] for r in response.json()["results"]] == [ + "first_visit", + "is_first_movie", + ] # specifying the event name doesn't filter the list, # instead it checks if the property has been seen with that event @@ -219,7 +239,8 @@ def test_is_event_property_filter(self): # sort a list of tuples by the first element assert sorted( - [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], key=lambda tup: tup[0] + [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], + key=lambda tup: tup[0], ) == [ ("$browser", True), ("$current_url", False), @@ -239,7 +260,8 @@ def test_is_event_property_filter(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) assert sorted( - [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], key=lambda tup: tup[0] + [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], + key=lambda tup: tup[0], 
) == [ ("$browser", True), ("first_visit", True), @@ -251,27 +273,41 @@ def test_is_event_property_filter(self): ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], [("first_visit", True)] + [(r["name"], r["is_seen_on_filtered_events"]) for r in response.json()["results"]], + [("first_visit", True)], ) def test_person_property_filter(self): PropertyDefinition.objects.create( - team=self.team, name="event property", property_type="String", type=PropertyDefinition.Type.EVENT + team=self.team, + name="event property", + property_type="String", + type=PropertyDefinition.Type.EVENT, ) PropertyDefinition.objects.create( - team=self.team, name="person property", property_type="String", type=PropertyDefinition.Type.PERSON + team=self.team, + name="person property", + property_type="String", + type=PropertyDefinition.Type.PERSON, ) PropertyDefinition.objects.create( - team=self.team, name="$initial_referrer", property_type="String", type=PropertyDefinition.Type.PERSON + team=self.team, + name="$initial_referrer", + property_type="String", + type=PropertyDefinition.Type.PERSON, ) # We want to hide this property on events, but not on persons PropertyDefinition.objects.create( - team=self.team, name="another", property_type="String", type=PropertyDefinition.Type.PERSON + team=self.team, + name="another", + property_type="String", + type=PropertyDefinition.Type.PERSON, ) response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?type=person") self.assertEqual(response.status_code, status.HTTP_200_OK) self.assertEqual( - [row["name"] for row in response.json()["results"]], ["$initial_referrer", "another", "person property"] + [row["name"] for row in response.json()["results"]], + ["$initial_referrer", "another", "person property"], ) response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?type=person&search=prop") @@ -307,7 
+343,10 @@ def test_group_property_filter(self): response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?type=group&group_type_index=1") self.assertEqual(response.status_code, status.HTTP_200_OK) - self.assertEqual([row["name"] for row in response.json()["results"]], ["group1 another", "group1 property"]) + self.assertEqual( + [row["name"] for row in response.json()["results"]], + ["group1 another", "group1 property"], + ) response = self.client.get(f"/api/projects/{self.team.pk}/property_definitions/?type=group&group_type_index=2") self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -352,7 +391,11 @@ def test_delete_property_definition(self, mock_capture): self.user.distinct_id, "property definition deleted", properties={"name": "test_property", "type": "event"}, - groups={"instance": ANY, "organization": str(self.organization.id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.organization.id), + "project": str(self.team.uuid), + }, ) activity_log: Optional[ActivityLog] = ActivityLog.objects.first() diff --git a/posthog/api/test/test_query.py b/posthog/api/test/test_query.py index 180eef00dd354..b49cd25b83287 100644 --- a/posthog/api/test/test_query.py +++ b/posthog/api/test/test_query.py @@ -40,23 +40,55 @@ def test_select_hogql_expressions(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + 
_create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val3"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val3"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): - query = EventsQuery(select=["properties.key", "event", "distinct_id", "concat(event, ' ', properties.key)"]) + query = EventsQuery( + select=[ + "properties.key", + "event", + "distinct_id", + "concat(event, ' ', properties.key)", + ] + ) response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual( response, response | { - "columns": ["properties.key", "event", "distinct_id", "concat(event, ' ', properties.key)"], + "columns": [ + "properties.key", + "event", + "distinct_id", + "concat(event, ' ', properties.key)", + ], "hasMore": False, "results": [ ["test_val1", "sign up", "2", "sign up test_val1"], @@ -115,18 +147,44 @@ def test_hogql_property_filter(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): - _create_event(team=self.team, event="sign 
out", distinct_id="4", properties={"key": "test_val3"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): query = EventsQuery( - select=["event", "distinct_id", "properties.key", "'a%sd'", "concat(event, ' ', properties.key)"] + select=[ + "event", + "distinct_id", + "properties.key", + "'a%sd'", + "concat(event, ' ', properties.key)", + ] ) response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() @@ -154,32 +212,66 @@ def test_event_property_filter(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): query = EventsQuery( - select=["event", "distinct_id", "properties.key", "'a%sd'", "concat(event, ' ', properties.key)"] + select=[ + "event", + "distinct_id", + "properties.key", + "'a%sd'", + "concat(event, ' ', properties.key)", + ] ) response = 
self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual(len(response["results"]), 4) query.properties = [ - EventPropertyFilter(type="event", key="key", value="test_val3", operator=PropertyOperator.exact) + EventPropertyFilter( + type="event", + key="key", + value="test_val3", + operator=PropertyOperator.exact, + ) ] response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual(len(response["results"]), 1) query.properties = [ - EventPropertyFilter(type="event", key="path", value="/", operator=PropertyOperator.icontains) + EventPropertyFilter( + type="event", + key="path", + value="/", + operator=PropertyOperator.icontains, + ) ] response = self.client.post(f"/api/projects/{self.team.id}/query/", {"query": query.dict()}).json() self.assertEqual(len(response["results"]), 1) @@ -194,21 +286,50 @@ def test_person_property_filter(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): - _create_event(team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3"}, + ) flush_persons_and_events() with 
freeze_time("2020-01-10 12:14:00"): query = EventsQuery( - select=["event", "distinct_id", "properties.key", "'a%sd'", "concat(event, ' ', properties.key)"], + select=[ + "event", + "distinct_id", + "properties.key", + "'a%sd'", + "concat(event, ' ', properties.key)", + ], properties=[ PersonPropertyFilter( - type="person", key="email", value="tom@posthog.com", operator=PropertyOperator.exact + type="person", + key="email", + value="tom@posthog.com", + operator=PropertyOperator.exact, ) ], ) @@ -293,14 +414,32 @@ def test_property_filter_aggregations(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() @@ -322,14 +461,32 @@ def test_select_event_person(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, 
event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() @@ -359,14 +516,32 @@ def test_events_query_all_time_date(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2021-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2022-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2023-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() @@ -393,14 +568,32 @@ def test_full_hogql_query(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", 
properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() @@ -426,7 +619,12 @@ def test_full_hogql_query_limit(self, MAX_SELECT_RETURNED_ROWS=15, DEFAULT_RETUR random_uuid = str(UUIDT()) with freeze_time("2020-01-10 12:00:00"): for _ in range(20): - _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id=random_uuid, + properties={"key": "test_val1"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): @@ -445,7 +643,12 @@ def test_full_hogql_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEFA random_uuid = str(UUIDT()) with freeze_time("2020-01-10 12:00:00"): for _ in range(20): - _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id=random_uuid, + properties={"key": "test_val1"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): @@ -465,13 +668,22 @@ def test_full_events_query_limit(self, 
MAX_SELECT_RETURNED_ROWS=15, DEFAULT_RETU random_uuid = str(UUIDT()) with freeze_time("2020-01-10 12:00:00"): for _ in range(20): - _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id=random_uuid, + properties={"key": "test_val1"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): response = process_query( team=self.team, - query_json={"kind": "EventsQuery", "select": ["event"], "where": [f"distinct_id = '{random_uuid}'"]}, + query_json={ + "kind": "EventsQuery", + "select": ["event"], + "where": [f"distinct_id = '{random_uuid}'"], + }, ) self.assertEqual(len(response.get("results", [])), 10) @@ -482,13 +694,22 @@ def test_full_events_query_limit_exported(self, MAX_SELECT_RETURNED_ROWS=15, DEF random_uuid = str(UUIDT()) with freeze_time("2020-01-10 12:00:00"): for _ in range(20): - _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id=random_uuid, + properties={"key": "test_val1"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): response = process_query( team=self.team, - query_json={"kind": "EventsQuery", "select": ["event"], "where": [f"distinct_id = '{random_uuid}'"]}, + query_json={ + "kind": "EventsQuery", + "select": ["event"], + "where": [f"distinct_id = '{random_uuid}'"], + }, in_export_context=True, ) @@ -505,7 +726,14 @@ def test_property_definition_annotation_does_not_break_things(self): "select": ["event"], # This used to cause query failure when tried to add an annotation for a node without location # (which properties.$browser is in this case) - "properties": [{"type": "event", "key": "$browser", "operator": "is_not", "value": "Foo"}], + "properties": [ + { + "type": "event", + "key": "$browser", + "operator": "is_not", + "value": "Foo", + } + ], }, ) 
self.assertEqual(response.get("columns"), ["event"]) @@ -526,14 +754,32 @@ def test_full_hogql_query_view(self): team=self.team, immediate=True, ) - _create_event(team=self.team, event="sign up", distinct_id="2", properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="2", + properties={"key": "test_val1"}, + ) with freeze_time("2020-01-10 12:11:00"): - _create_event(team=self.team, event="sign out", distinct_id="2", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="2", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:12:00"): - _create_event(team=self.team, event="sign out", distinct_id="3", properties={"key": "test_val2"}) + _create_event( + team=self.team, + event="sign out", + distinct_id="3", + properties={"key": "test_val2"}, + ) with freeze_time("2020-01-10 12:13:00"): _create_event( - team=self.team, event="sign out", distinct_id="4", properties={"key": "test_val3", "path": "a/b/c"} + team=self.team, + event="sign out", + distinct_id="4", + properties={"key": "test_val3", "path": "a/b/c"}, ) flush_persons_and_events() @@ -567,7 +813,12 @@ def test_full_hogql_query_values(self): random_uuid = str(UUIDT()) with freeze_time("2020-01-10 12:00:00"): for _ in range(20): - _create_event(team=self.team, event="sign up", distinct_id=random_uuid, properties={"key": "test_val1"}) + _create_event( + team=self.team, + event="sign up", + distinct_id=random_uuid, + properties={"key": "test_val1"}, + ) flush_persons_and_events() with freeze_time("2020-01-10 12:14:00"): diff --git a/posthog/api/test/test_sharing.py b/posthog/api/test/test_sharing.py index 8e4fb2cebbdb5..a38c3e04dabfe 100644 --- a/posthog/api/test/test_sharing.py +++ b/posthog/api/test/test_sharing.py @@ -19,7 +19,10 @@ @parameterized.expand( [ ["http://localhost:8000/something", "http://localhost:8000/something.png"], - ["http://localhost:8000/something?query=string", 
"http://localhost:8000/something.png?query=string"], + [ + "http://localhost:8000/something?query=string", + "http://localhost:8000/something.png?query=string", + ], [ "http://localhost:8000/something?query=string&another=one", "http://localhost:8000/something.png?query=string&another=one", @@ -28,7 +31,10 @@ "http://localhost:8000/something?query=string&another=one#withhash", "http://localhost:8000/something.png?query=string&another=one#withhash", ], - ["http://localhost:8000/something#withhash", "http://localhost:8000/something.png#withhash"], + [ + "http://localhost:8000/something#withhash", + "http://localhost:8000/something.png#withhash", + ], ] ) def test_shared_image_alternative(url: str, expected_url: str) -> None: @@ -39,7 +45,10 @@ class TestSharing(APIBaseTest): dashboard: Dashboard = None # type: ignore insight: Insight = None # type: ignore - insight_filter_dict = {"events": [{"id": "$pageview"}], "properties": [{"key": "$browser", "value": "Mac OS X"}]} + insight_filter_dict = { + "events": [{"id": "$pageview"}], + "properties": [{"key": "$browser", "value": "Mac OS X"}], + } @classmethod def setUpTestData(cls): @@ -47,7 +56,9 @@ def setUpTestData(cls): cls.dashboard = Dashboard.objects.create(team=cls.team, name="example dashboard", created_by=cls.user) cls.insight = Insight.objects.create( - filters=Filter(data=cls.insight_filter_dict).to_dict(), team=cls.team, created_by=cls.user + filters=Filter(data=cls.insight_filter_dict).to_dict(), + team=cls.team, + created_by=cls.user, ) @freeze_time("2022-01-01") @@ -59,14 +70,19 @@ def test_gets_sharing_config(self, patched_exporter_task: Mock): assert SharingConfiguration.objects.count() == 0 assert response.status_code == status.HTTP_200_OK data = response.json() - assert data == {"access_token": data["access_token"], "created_at": None, "enabled": False} + assert data == { + "access_token": data["access_token"], + "created_at": None, + "enabled": False, + } @freeze_time("2022-01-01") 
@patch("posthog.api.exports.exporter.export_asset.delay") def test_does_not_change_token_when_toggling_enabled_state(self, patched_exporter_task: Mock): assert SharingConfiguration.objects.count() == 0 response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", + {"enabled": True}, ) initial_data = response.json() assert SharingConfiguration.objects.count() == 1 @@ -78,7 +94,8 @@ def test_does_not_change_token_when_toggling_enabled_state(self, patched_exporte } response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", {"enabled": False} + f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", + {"enabled": False}, ) assert response.json() == { "access_token": initial_data["access_token"], @@ -89,7 +106,8 @@ def test_does_not_change_token_when_toggling_enabled_state(self, patched_exporte @patch("posthog.api.exports.exporter.export_asset.delay") def test_can_edit_enabled_state(self, patched_exporter_task: Mock): response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", + {"enabled": True}, ) data = response.json() assert response.status_code == status.HTTP_200_OK @@ -105,14 +123,16 @@ def test_can_edit_enabled_state_for_insight(self, patched_exporter_task: Mock): assert ActivityLog.objects.count() == 0 response = self.client.patch( - f"/api/projects/{self.team.id}/insights/{self.insight.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/insights/{self.insight.id}/sharing", + {"enabled": True}, ) data = response.json() assert response.status_code == status.HTTP_200_OK assert data["enabled"] response = self.client.patch( - f"/api/projects/{self.team.id}/insights/{self.insight.id}/sharing", {"enabled": False} + 
f"/api/projects/{self.team.id}/insights/{self.insight.id}/sharing", + {"enabled": False}, ) data = response.json() assert response.status_code == status.HTTP_200_OK @@ -128,7 +148,10 @@ def test_can_edit_enabled_state_for_insight(self, patched_exporter_task: Mock): def test_exports_image_when_sharing(self, patched_exporter_task: Mock): assert ExportedAsset.objects.count() == 0 - self.client.patch(f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", {"enabled": True}) + self.client.patch( + f"/api/projects/{self.team.id}/dashboards/{self.dashboard.id}/sharing", + {"enabled": True}, + ) assert ExportedAsset.objects.count() == 1 asset = ExportedAsset.objects.first() @@ -172,7 +195,8 @@ def test_should_not_be_affected_by_collaboration_rules(self, _patched_exporter_t ) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", + {"enabled": True}, ) assert response.status_code == 200 @@ -180,10 +204,15 @@ def test_should_not_be_affected_by_collaboration_rules(self, _patched_exporter_t @patch("posthog.api.exports.exporter.export_asset.delay") def test_should_not_get_deleted_item(self, _patched_exporter_task: Mock): dashboard = Dashboard.objects.create( - team=self.team, name="example dashboard", created_by=self.user, share_token="my_test_token", is_shared=True + team=self.team, + name="example dashboard", + created_by=self.user, + share_token="my_test_token", + is_shared=True, ) response = self.client.patch( - f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/dashboards/{dashboard.id}/sharing", + {"enabled": True}, ) response = self.client.get(f"/shared_dashboard/my_test_token") assert response.status_code == 200 @@ -201,7 +230,11 @@ def test_should_not_get_deleted_item(self, _patched_exporter_task: Mock): 
@patch("posthog.models.exported_asset.object_storage.read_bytes") @patch("posthog.api.sharing.asset_for_token") def test_can_get_shared_dashboard_asset_with_no_content_but_content_location( - self, url: str, patched_asset_for_token, patched_object_storage, _patched_exporter_task: Mock + self, + url: str, + patched_asset_for_token, + patched_object_storage, + _patched_exporter_task: Mock, ) -> None: asset = ExportedAsset.objects.create( team_id=self.team.id, @@ -234,7 +267,8 @@ def test_shared_thing_can_generate_open_graph_image( assert ExportedAsset.objects.count() == 0 share_response = self.client.patch( - f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", + {"enabled": True}, ) access_token = share_response.json()["access_token"] @@ -258,7 +292,8 @@ def test_shared_thing_can_reuse_existing_generated_open_graph_image( target = self.insight if type == "insights" else self.dashboard share_response = self.client.patch( - f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", + {"enabled": True}, ) access_token = share_response.json()["access_token"] @@ -300,7 +335,8 @@ def test_shared_insight_can_regenerate_stale_existing_generated_open_graph_image time_in_the_past = now() - timedelta(hours=4) with freeze_time(time_in_the_past): share_response = self.client.patch( - f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/{type}/{target.pk}/sharing", + {"enabled": True}, ) # enabling creates an asset assert ExportedAsset.objects.count() == 1 diff --git a/posthog/api/test/test_signup.py b/posthog/api/test/test_signup.py index e3626f43a0188..8ad336872972d 100644 --- a/posthog/api/test/test_signup.py +++ b/posthog/api/test/test_signup.py @@ -20,7 +20,11 @@ from posthog.test.base import APIBaseTest from posthog.utils import get_instance_realm 
-MOCK_GITLAB_SSO_RESPONSE = {"access_token": "123", "email": "testemail@posthog.com", "name": "John Doe"} +MOCK_GITLAB_SSO_RESPONSE = { + "access_token": "123", + "email": "testemail@posthog.com", + "name": "John Doe", +} class TestSignupAPI(APIBaseTest): @@ -105,13 +109,20 @@ def test_signup_disallowed_on_email_collision(self): User.objects.create(email="fake@posthog.com", first_name="Jane") response = self.client.post( - "/api/signup/", {"first_name": "John", "email": "fake@posthog.com", "password": "notsecure"} + "/api/signup/", + { + "first_name": "John", + "email": "fake@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), self.validation_error_response( - "There is already an account with this email address.", code="unique", attr="email" + "There is already an account with this email address.", + code="unique", + attr="email", ), ) self.assertEqual(User.objects.count(), 1) @@ -120,11 +131,21 @@ def test_signup_disallowed_on_email_collision(self): def test_signup_disallowed_on_self_hosted_by_default(self): with self.is_cloud(False): response = self.client.post( - "/api/signup/", {"first_name": "Jane", "email": "hedgehog2@posthog.com", "password": "notsecure"} + "/api/signup/", + { + "first_name": "Jane", + "email": "hedgehog2@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) response = self.client.post( - "/api/signup/", {"first_name": "Jane", "email": "hedgehog2@posthog.com", "password": "notsecure"} + "/api/signup/", + { + "first_name": "Jane", + "email": "hedgehog2@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( @@ -146,7 +167,9 @@ def test_signup_allowed_on_self_hosted_with_env_var(self): pass else: super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", 
valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) Organization.objects.create(name="name") @@ -156,7 +179,11 @@ def test_signup_allowed_on_self_hosted_with_env_var(self): with self.settings(MULTI_ORG_ENABLED=True): response = self.client.post( "/api/signup/", - {"first_name": "Jane", "email": "hedgehog4@posthog.com", "password": "notsecure"}, + { + "first_name": "Jane", + "email": "hedgehog4@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(response.json()["email"], "hedgehog4@posthog.com") @@ -166,7 +193,12 @@ def test_signup_allowed_on_self_hosted_with_env_var(self): @patch("posthoganalytics.capture") def test_signup_minimum_attrs(self, mock_capture): response = self.client.post( - "/api/signup/", {"first_name": "Jane", "email": "hedgehog2@posthog.com", "password": "notsecure"} + "/api/signup/", + { + "first_name": "Jane", + "email": "hedgehog2@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -221,12 +253,20 @@ def test_cant_sign_up_without_required_attributes(self): required_attributes = ["first_name", "email", "password"] for attribute in required_attributes: - body = {"first_name": "Jane", "email": "invalid@posthog.com", "password": "notsecure"} + body = { + "first_name": "Jane", + "email": "invalid@posthog.com", + "password": "notsecure", + } body.pop(attribute) # Make sure the endpoint works with and without the trailing slash response = self.client.post("/api/signup", body) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, f"{attribute} is required") + self.assertEqual( + response.status_code, + status.HTTP_400_BAD_REQUEST, + f"{attribute} is required", + ) self.assertEqual( response.json(), { @@ -258,7 +298,11 @@ def test_cant_sign_up_with_required_attributes_null(self): body[attribute] = None 
response = self.client.post("/api/signup/", body) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, f"{attribute} may not be null") + self.assertEqual( + response.status_code, + status.HTTP_400_BAD_REQUEST, + f"{attribute} may not be null", + ) self.assertEqual( response.json(), { @@ -279,7 +323,8 @@ def test_cant_sign_up_with_short_password(self): team_count: int = Team.objects.count() response = self.client.post( - "/api/signup/", {"first_name": "Jane", "email": "failed@posthog.com", "password": "123"} + "/api/signup/", + {"first_name": "Jane", "email": "failed@posthog.com", "password": "123"}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -302,7 +347,12 @@ def test_default_dashboard_is_created_on_signup(self): """ response = self.client.post( - "/api/signup/", {"first_name": "Jane", "email": "hedgehog75@posthog.com", "password": "notsecure"} + "/api/signup/", + { + "first_name": "Jane", + "email": "hedgehog75@posthog.com", + "password": "notsecure", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -338,10 +388,15 @@ def test_api_can_use_social_login_to_create_organization_if_enabled(self, mock_r pass else: super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) - with self.settings(SOCIAL_AUTH_GITLAB_KEY="gitlab_123", SOCIAL_AUTH_GITLAB_SECRET="gitlab_secret"): + with self.settings( + SOCIAL_AUTH_GITLAB_KEY="gitlab_123", + SOCIAL_AUTH_GITLAB_SECRET="gitlab_secret", + ): response = self.client.get(reverse("social:begin", kwargs={"backend": "gitlab"})) self.assertEqual(response.status_code, status.HTTP_302_FOUND) @@ -369,10 +424,15 @@ def test_api_cannot_use_social_login_to_create_organization_if_disabled(self, mo pass else: super(LicenseManager, cast(LicenseManager, 
License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7), ) - with self.settings(SOCIAL_AUTH_GITLAB_KEY="gitlab_123", SOCIAL_AUTH_GITLAB_SECRET="gitlab_secret"): + with self.settings( + SOCIAL_AUTH_GITLAB_KEY="gitlab_123", + SOCIAL_AUTH_GITLAB_SECRET="gitlab_secret", + ): response = self.client.get(reverse("social:begin", kwargs={"backend": "gitlab"})) self.assertEqual(response.status_code, status.HTTP_302_FOUND) @@ -389,7 +449,10 @@ def test_api_cannot_use_social_login_to_create_organization_if_disabled(self, mo @mock.patch("social_core.backends.base.BaseAuth.request") @pytest.mark.ee def test_api_social_login_to_create_organization(self, mock_request): - with self.settings(SOCIAL_AUTH_GITHUB_KEY="github_123", SOCIAL_AUTH_GITHUB_SECRET="github_secret"): + with self.settings( + SOCIAL_AUTH_GITHUB_KEY="github_123", + SOCIAL_AUTH_GITHUB_SECRET="github_secret", + ): response = self.client.get(reverse("social:begin", kwargs={"backend": "github"})) self.assertEqual(response.status_code, status.HTTP_302_FOUND) @@ -446,7 +509,10 @@ def run_test_for_whitelisted_domain(self, mock_sso_providers, mock_request, mock url = reverse("social:complete", kwargs={"backend": "google-oauth2"}) url += f"?code=2&state={response.client.session['google-oauth2_state']}" - mock_request.return_value.json.return_value = {"access_token": "123", "email": "jane@hogflix.posthog.com"} + mock_request.return_value.json.return_value = { + "access_token": "123", + "email": "jane@hogflix.posthog.com", + } response = self.client.get(url, follow=True) self.assertEqual(response.status_code, status.HTTP_200_OK) # because `follow=True` @@ -482,7 +548,12 @@ def test_social_signup_with_whitelisted_domain_on_self_hosted( @mock.patch("posthog.tasks.user_identify.identify_task") @pytest.mark.ee def test_social_signup_with_whitelisted_domain_on_cloud( - self, 
mock_identify, mock_sso_providers, mock_request, mock_update_distinct_ids, mock_capture + self, + mock_identify, + mock_sso_providers, + mock_request, + mock_update_distinct_ids, + mock_capture, ): with self.is_cloud(True): self.run_test_for_whitelisted_domain(mock_sso_providers, mock_request, mock_capture) @@ -513,7 +584,10 @@ def test_social_signup_with_whitelisted_domain_on_cloud_reverse(self, mock_sso_p url = reverse("social:complete", kwargs={"backend": "google-oauth2"}) url += f"?code=2&state={response.client.session['google-oauth2_state']}" - mock_request.return_value.json.return_value = {"access_token": "123", "email": "jane@hogflix.posthog.com"} + mock_request.return_value.json.return_value = { + "access_token": "123", + "email": "jane@hogflix.posthog.com", + } response = self.client.get(url, follow=True) self.assertEqual(response.status_code, status.HTTP_200_OK) # because `follow=True` @@ -540,7 +614,10 @@ def test_cannot_social_signup_with_whitelisted_but_jit_provisioning_disabled( mock_sso_providers.return_value = {"google-oauth2": True} new_org = Organization.objects.create(name="Test org") OrganizationDomain.objects.create( - domain="posthog.net", verified_at=timezone.now(), jit_provisioning_enabled=False, organization=new_org + domain="posthog.net", + verified_at=timezone.now(), + jit_provisioning_enabled=False, + organization=new_org, ) # note `jit_provisioning_enabled=False` response = self.client.get(reverse("social:begin", kwargs={"backend": "google-oauth2"})) @@ -548,7 +625,10 @@ def test_cannot_social_signup_with_whitelisted_but_jit_provisioning_disabled( url = reverse("social:complete", kwargs={"backend": "google-oauth2"}) url += f"?code=2&state={response.client.session['google-oauth2_state']}" - mock_request.return_value.json.return_value = {"access_token": "123", "email": "alice@posthog.net"} + mock_request.return_value.json.return_value = { + "access_token": "123", + "email": "alice@posthog.net", + } response = self.client.get(url, 
follow=True) self.assertEqual(response.status_code, status.HTTP_200_OK) # because `follow=True` @@ -563,7 +643,10 @@ def test_cannot_social_signup_with_whitelisted_but_unverified_domain(self, mock_ mock_sso_providers.return_value = {"google-oauth2": True} new_org = Organization.objects.create(name="Test org") OrganizationDomain.objects.create( - domain="posthog.net", verified_at=None, jit_provisioning_enabled=True, organization=new_org + domain="posthog.net", + verified_at=None, + jit_provisioning_enabled=True, + organization=new_org, ) # note `verified_at=None` response = self.client.get(reverse("social:begin", kwargs={"backend": "google-oauth2"})) @@ -571,7 +654,10 @@ def test_cannot_social_signup_with_whitelisted_but_unverified_domain(self, mock_ url = reverse("social:complete", kwargs={"backend": "google-oauth2"}) url += f"?code=2&state={response.client.session['google-oauth2_state']}" - mock_request.return_value.json.return_value = {"access_token": "123", "email": "alice@posthog.net"} + mock_request.return_value.json.return_value = { + "access_token": "123", + "email": "alice@posthog.net", + } response = self.client.get(url, follow=True) self.assertEqual(response.status_code, status.HTTP_200_OK) # because `follow=True` @@ -586,7 +672,10 @@ def test_api_cannot_use_whitelist_for_different_domain(self, mock_sso_providers, mock_sso_providers.return_value = {"google-oauth2": True} new_org = Organization.objects.create(name="Test org") OrganizationDomain.objects.create( - domain="good.com", verified_at=timezone.now(), jit_provisioning_enabled=True, organization=new_org + domain="good.com", + verified_at=timezone.now(), + jit_provisioning_enabled=True, + organization=new_org, ) response = self.client.get(reverse("social:begin", kwargs={"backend": "google-oauth2"})) @@ -619,7 +708,10 @@ def test_social_signup_to_existing_org_without_whitelisted_domain_on_cloud(self, url = reverse("social:complete", kwargs={"backend": "google-oauth2"}) url += 
f"?code=2&state={response.client.session['google-oauth2_state']}" - mock_request.return_value.json.return_value = {"access_token": "123", "email": "jane@hogflix.posthog.com"} + mock_request.return_value.json.return_value = { + "access_token": "123", + "email": "jane@hogflix.posthog.com", + } response = self.client.get(url, follow=True) self.assertEqual(response.status_code, status.HTTP_200_OK) # because `follow=True` @@ -661,7 +753,9 @@ def test_api_invite_sign_up_prevalidate(self): def test_api_invite_sign_up_with_first_name_prevalidate(self): invite: OrganizationInvite = OrganizationInvite.objects.create( - target_email="test+58@posthog.com", organization=self.organization, first_name="Jane" + target_email="test+58@posthog.com", + organization=self.organization, + first_name="Jane", ) response = self.client.get(f"/api/signup/{invite.id}/") @@ -782,7 +876,10 @@ def test_api_invite_sign_up(self, mock_capture): # User is now a member of the organization self.assertEqual(user.organization_memberships.count(), 1) - self.assertEqual(user.organization_memberships.first().organization, self.organization) # type: ignore + self.assertEqual( + user.organization_memberships.first().organization, # type: ignore + self.organization, + ) # Defaults are set correctly self.assertEqual(user.organization, self.organization) @@ -797,13 +894,19 @@ def test_api_invite_sign_up(self, mock_capture): mock_capture.assert_called_once() self.assertEqual(user.distinct_id, mock_capture.call_args.args[0]) self.assertEqual("user signed up", mock_capture.call_args.args[1]) - self.assertEqual("Engineering", mock_capture.call_args[1]["properties"]["role_at_organization"]) + self.assertEqual( + "Engineering", + mock_capture.call_args[1]["properties"]["role_at_organization"], + ) # Assert that key properties were set properly event_props = mock_capture.call_args.kwargs["properties"] self.assertEqual(event_props["is_first_user"], False) self.assertEqual(event_props["is_organization_first_user"], 
False) self.assertEqual(event_props["new_onboarding_enabled"], False) - self.assertEqual(event_props["signup_backend_processor"], "OrganizationInviteSignupSerializer") + self.assertEqual( + event_props["signup_backend_processor"], + "OrganizationInviteSignupSerializer", + ) self.assertEqual(event_props["signup_social_provider"], "") self.assertEqual(event_props["realm"], get_instance_realm()) @@ -828,7 +931,8 @@ def test_api_invite_sign_up_where_there_are_no_default_non_private_projects(self self.team.save() response = self.client.post( - f"/api/signup/{invite.id}/", {"first_name": "Alice", "password": "test_password", "email_opt_in": True} + f"/api/signup/{invite.id}/", + {"first_name": "Alice", "password": "test_password", "email_opt_in": True}, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) user = cast(User, User.objects.order_by("-pk")[0]) @@ -846,9 +950,13 @@ def test_api_invite_sign_up_where_default_project_is_private(self): self.team.save() team = Team.objects.create(name="Public project", organization=self.organization, access_control=False) invite: OrganizationInvite = OrganizationInvite.objects.create( - target_email="test+privatepublic@posthog.com", organization=self.organization + target_email="test+privatepublic@posthog.com", + organization=self.organization, + ) + response = self.client.post( + f"/api/signup/{invite.id}/", + {"first_name": "Charlie", "password": "test_password"}, ) - response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Charlie", "password": "test_password"}) self.assertEqual(response.status_code, status.HTTP_201_CREATED) user = cast(User, User.objects.order_by("-pk")[0]) self.assertEqual(user.organization_memberships.count(), 1) @@ -861,9 +969,18 @@ def test_api_invite_sign_up_member_joined_email_is_not_sent_for_initial_member(s target_email="test+100@posthog.com", organization=self.organization ) - with self.settings(EMAIL_ENABLED=True, EMAIL_HOST="localhost", 
SITE_URL="http://test.posthog.com"): + with self.settings( + EMAIL_ENABLED=True, + EMAIL_HOST="localhost", + SITE_URL="http://test.posthog.com", + ): response = self.client.post( - f"/api/signup/{invite.id}/", {"first_name": "Alice", "password": "test_password", "email_opt_in": True} + f"/api/signup/{invite.id}/", + { + "first_name": "Alice", + "password": "test_password", + "email_opt_in": True, + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -881,7 +998,11 @@ def test_api_invite_sign_up_member_joined_email_is_sent_for_next_members(self): with self.settings(EMAIL_ENABLED=True, SITE_URL="http://test.posthog.com"): response = self.client.post( f"/api/signup/{invite.id}/", - {"first_name": "Alice", "password": "test_password", "email_opt_in": True}, + { + "first_name": "Alice", + "password": "test_password", + "email_opt_in": True, + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -902,9 +1023,18 @@ def test_api_invite_sign_up_member_joined_email_is_not_sent_if_disabled(self): target_email="test+100@posthog.com", organization=self.organization ) - with self.settings(EMAIL_ENABLED=True, EMAIL_HOST="localhost", SITE_URL="http://test.posthog.com"): + with self.settings( + EMAIL_ENABLED=True, + EMAIL_HOST="localhost", + SITE_URL="http://test.posthog.com", + ): response = self.client.post( - f"/api/signup/{invite.id}/", {"first_name": "Alice", "password": "test_password", "email_opt_in": True} + f"/api/signup/{invite.id}/", + { + "first_name": "Alice", + "password": "test_password", + "email_opt_in": True, + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -931,7 +1061,9 @@ def test_existing_user_can_sign_up_to_a_new_organization(self, mock_update_disti pass else: super(LicenseManager, cast(LicenseManager, License.objects)).create( - key="key_123", plan="enterprise", valid_until=timezone.datetime(2038, 1, 19, 3, 14, 7) + key="key_123", + plan="enterprise", + valid_until=timezone.datetime(2038, 1, 19, 
3, 14, 7), ) with self.is_cloud(True): @@ -1007,7 +1139,10 @@ def test_cannot_use_claim_invite_endpoint_to_update_user(self, mock_capture): self.client.force_login(user) - response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Bob", "password": "new_password"}) + response = self.client.post( + f"/api/signup/{invite.id}/", + {"first_name": "Bob", "password": "new_password"}, + ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual( response.json(), @@ -1107,7 +1242,8 @@ def test_cant_claim_invalid_invite(self): org_count: int = Organization.objects.count() response = self.client.post( - f"/api/signup/{uuid.uuid4()}/", {"first_name": "Charlie", "password": "test_password"} + f"/api/signup/{uuid.uuid4()}/", + {"first_name": "Charlie", "password": "test_password"}, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -1135,7 +1271,10 @@ def test_cant_claim_expired_invite(self): invite.created_at = datetime.datetime(2020, 3, 3, tzinfo=ZoneInfo("UTC")) invite.save() - response = self.client.post(f"/api/signup/{invite.id}/", {"first_name": "Charlie", "password": "test_password"}) + response = self.client.post( + f"/api/signup/{invite.id}/", + {"first_name": "Charlie", "password": "test_password"}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -1157,11 +1296,20 @@ def test_api_social_invite_sign_up(self): Organization.objects.all().delete() # Can only create organizations in fresh instances # Simulate SSO process started session = self.client.session - session.update({"backend": "google-oauth2", "email": "test_api_social_invite_sign_up@posthog.com"}) + session.update( + { + "backend": "google-oauth2", + "email": "test_api_social_invite_sign_up@posthog.com", + } + ) session.save() response = self.client.post( - "/api/social_signup", {"organization_name": "Org test_api_social_invite_sign_up", "first_name": "Max"} + 
"/api/social_signup", + { + "organization_name": "Org test_api_social_invite_sign_up", + "first_name": "Max", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -1170,11 +1318,16 @@ def test_api_social_invite_sign_up(self): # Check the organization and user were created self.assertEqual( User.objects.filter( - email="test_api_social_invite_sign_up@posthog.com", first_name="Max", is_email_verified=True + email="test_api_social_invite_sign_up@posthog.com", + first_name="Max", + is_email_verified=True, ).count(), 1, ) - self.assertEqual(Organization.objects.filter(name="Org test_api_social_invite_sign_up").count(), 1) + self.assertEqual( + Organization.objects.filter(name="Org test_api_social_invite_sign_up").count(), + 1, + ) @patch("posthog.api.signup.is_email_available", return_value=True) @patch("posthog.api.signup.EmailVerifier.create_token_and_send_email_verification") @@ -1184,13 +1337,19 @@ def test_api_social_invite_sign_up_if_email_verification_on(self, email_mock, em # Simulate SSO process started session = self.client.session session.update( - {"backend": "google-oauth2", "email": "test_api_social_invite_sign_up_with_verification@posthog.com"} + { + "backend": "google-oauth2", + "email": "test_api_social_invite_sign_up_with_verification@posthog.com", + } ) session.save() response = self.client.post( "/api/social_signup", - {"organization_name": "Org test_api_social_invite_sign_up_with_verification", "first_name": "Max"}, + { + "organization_name": "Org test_api_social_invite_sign_up_with_verification", + "first_name": "Max", + }, ) self.assertEqual(response.status_code, status.HTTP_201_CREATED) @@ -1199,12 +1358,14 @@ def test_api_social_invite_sign_up_if_email_verification_on(self, email_mock, em # Check the organization and user were created self.assertEqual( User.objects.filter( - email="test_api_social_invite_sign_up_with_verification@posthog.com", first_name="Max" + 
email="test_api_social_invite_sign_up_with_verification@posthog.com", + first_name="Max", ).count(), 1, ) self.assertEqual( - Organization.objects.filter(name="Org test_api_social_invite_sign_up_with_verification").count(), 1 + Organization.objects.filter(name="Org test_api_social_invite_sign_up_with_verification").count(), + 1, ) me_response = self.client.get("/api/users/@me/") self.assertEqual(me_response.status_code, status.HTTP_200_OK) @@ -1212,7 +1373,10 @@ def test_api_social_invite_sign_up_if_email_verification_on(self, email_mock, em def test_cannot_use_social_invite_sign_up_if_social_session_is_not_active(self): Organization.objects.all().delete() # Can only create organizations in fresh instances - response = self.client.post("/api/social_signup", {"organization_name": "Tech R Us", "first_name": "Max"}) + response = self.client.post( + "/api/social_signup", + {"organization_name": "Tech R Us", "first_name": "Max"}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), diff --git a/posthog/api/test/test_signup_demo.py b/posthog/api/test/test_signup_demo.py index b15f6e5da52d7..740b9ac6b5d12 100644 --- a/posthog/api/test/test_signup_demo.py +++ b/posthog/api/test/test_signup_demo.py @@ -57,7 +57,12 @@ def test_demo_login(self, *args): # first_name and organization_name aren't used when logging in # In demo, the signup endpoint functions as login if the email already exists response = self.client.post( - "/api/signup/", {"email": "charlie@tech-r-us.com", "first_name": "X", "organization_name": "Y"} + "/api/signup/", + { + "email": "charlie@tech-r-us.com", + "first_name": "X", + "organization_name": "Y", + }, ) user = auth.get_user(self.client) diff --git a/posthog/api/test/test_site_app.py b/posthog/api/test/test_site_app.py index 186a153f805af..82823ac4cf4ed 100644 --- a/posthog/api/test/test_site_app.py +++ b/posthog/api/test/test_site_app.py @@ -25,7 +25,12 @@ def test_site_app(self): 
status=PluginSourceFile.Status.TRANSPILED, ) plugin_config = PluginConfig.objects.create( - plugin=plugin, enabled=True, order=1, team=self.team, config={}, web_token="tokentoken" + plugin=plugin, + enabled=True, + order=1, + team=self.team, + config={}, + web_token="tokentoken", ) response = self.client.get( diff --git a/posthog/api/test/test_stickiness.py b/posthog/api/test/test_stickiness.py index 1fa882e4a70f5..56d610c205e65 100644 --- a/posthog/api/test/test_stickiness.py +++ b/posthog/api/test/test_stickiness.py @@ -12,7 +12,11 @@ from posthog.client import sync_execute from posthog.constants import ENTITY_ID, ENTITY_TYPE from posthog.models.team import Team -from posthog.test.base import APIBaseTest, create_person_id_override_by_distinct_id, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + create_person_id_override_by_distinct_id, + snapshot_clickhouse_queries, +) from posthog.utils import encode_get_request_params @@ -71,7 +75,11 @@ def _create_multiple_people(self, period=None, event_properties=lambda index: {} if period is None: period = timedelta(days=1) base_time = datetime.fromisoformat("2020-01-01T12:00:00.000000") - p1 = person_factory(team_id=self.team.id, distinct_ids=["person1"], properties={"name": "person1"}) + p1 = person_factory( + team_id=self.team.id, + distinct_ids=["person1"], + properties={"name": "person1"}, + ) p1_person_id = str(uuid.uuid4()) event_factory( team=self.team, @@ -82,7 +90,11 @@ def _create_multiple_people(self, period=None, event_properties=lambda index: {} person_id=p1_person_id, ) - p2 = person_factory(team_id=self.team.id, distinct_ids=["person2"], properties={"name": "person2"}) + p2 = person_factory( + team_id=self.team.id, + distinct_ids=["person2"], + properties={"name": "person2"}, + ) p2_person_id = str(uuid.uuid4()) event_factory( team=self.team, @@ -111,7 +123,9 @@ def _create_multiple_people(self, period=None, event_properties=lambda index: {} ) p3 = person_factory( - 
team_id=self.team.id, distinct_ids=["person3a", "person3b"], properties={"name": "person3"} + team_id=self.team.id, + distinct_ids=["person3a", "person3b"], + properties={"name": "person3"}, ) p3_person_id = str(uuid.uuid4()) event_factory( @@ -139,7 +153,11 @@ def _create_multiple_people(self, period=None, event_properties=lambda index: {} person_id=p3_person_id, ) - p4 = person_factory(team_id=self.team.id, distinct_ids=["person4"], properties={"name": "person4"}) + p4 = person_factory( + team_id=self.team.id, + distinct_ids=["person4"], + properties={"name": "person4"}, + ) p4_person_id = str(uuid.uuid4()) event_factory( @@ -224,7 +242,11 @@ def test_stickiness_all_time(self): stickiness_response = get_stickiness_ok( client=self.client, team=self.team, - request={"shown_as": "Stickiness", "date_from": "all", "events": [{"id": "watched movie"}]}, + request={ + "shown_as": "Stickiness", + "date_from": "all", + "events": [{"id": "watched movie"}], + }, ) response = stickiness_response["result"] @@ -390,7 +412,12 @@ def test_stickiness_entity_filter(self): "shown_as": "Stickiness", "date_from": "2020-01-01", "date_to": "2020-01-08", - "events": [{"id": "watched movie", "properties": [{"key": "$browser", "value": "Chrome"}]}], + "events": [ + { + "id": "watched movie", + "properties": [{"key": "$browser", "value": "Chrome"}], + } + ], }, ) response = stickiness_response["result"] @@ -544,7 +571,11 @@ def test_stickiness_people_with_entity_filter(self): def test_stickiness_people_paginated(self): for i in range(150): person_name = f"person{i}" - person_factory(team_id=self.team.id, distinct_ids=[person_name], properties={"name": person_name}) + person_factory( + team_id=self.team.id, + distinct_ids=[person_name], + properties={"name": person_name}, + ) event_factory( team=self.team, event="watched movie", @@ -601,7 +632,11 @@ def test_compare(self): def test_filter_test_accounts(self): self._create_multiple_people() - person_factory(team_id=self.team.id, 
distinct_ids=["ph"], properties={"email": "test@posthog.com"}) + person_factory( + team_id=self.team.id, + distinct_ids=["ph"], + properties={"email": "test@posthog.com"}, + ) event_factory( team=self.team, event="watched movie", diff --git a/posthog/api/test/test_survey.py b/posthog/api/test/test_survey.py index 4225d33a27c16..568d745ffed18 100644 --- a/posthog/api/test/test_survey.py +++ b/posthog/api/test/test_survey.py @@ -1,10 +1,12 @@ from datetime import datetime, timedelta from unittest.mock import ANY +import pytest from rest_framework import status from django.core.cache import cache from django.test.client import Client +from posthog.api.survey import nh3_clean_with_whitelist from posthog.models.feedback.survey import Survey from posthog.test.base import ( @@ -28,7 +30,12 @@ def test_can_create_basic_survey(self): "name": "Notebooks beta release survey", "description": "Get feedback on the new notebooks feature", "type": "popover", - "questions": [{"type": "open", "question": "What do you think of the new notebooks feature?"}], + "questions": [ + { + "type": "open", + "question": "What do you think of the new notebooks feature?", + } + ], "targeting_flag_filters": None, }, format="json", @@ -40,7 +47,10 @@ def test_can_create_basic_survey(self): assert response_data["description"] == "Get feedback on the new notebooks feature" assert response_data["type"] == "popover" assert response_data["questions"] == [ - {"type": "open", "question": "What do you think of the new notebooks feature?"} + { + "type": "open", + "question": "What do you think of the new notebooks feature?", + } ] assert response_data["created_by"]["id"] == self.user.id @@ -52,7 +62,12 @@ def test_can_create_survey_with_linked_flag_and_targeting(self): data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to 
improve from notebooks?", + } + ], "linked_flag_id": notebooks_flag.id, "targeting_flag_filters": { "groups": [ @@ -60,7 +75,12 @@ def test_can_create_survey_with_linked_flag_and_targeting(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -78,14 +98,24 @@ def test_can_create_survey_with_linked_flag_and_targeting(self): "groups": [ { "variant": None, - "properties": [{"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}], + "properties": [ + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } + ], "rollout_percentage": None, } ] } assert response_data["conditions"] == {"url": "https://app.posthog.com/notebooks"} assert response_data["questions"] == [ - {"type": "open", "question": "What would you want to improve from notebooks?"} + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } ] def test_can_create_survey_with_targeting_with_remove_parameter(self): @@ -94,14 +124,24 @@ def test_can_create_survey_with_targeting_with_remove_parameter(self): data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "targeting_flag_filters": { "groups": [ { "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -119,14 +159,24 @@ def test_can_create_survey_with_targeting_with_remove_parameter(self): "groups": [ { "variant": None, - "properties": [{"key": 
"billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}], + "properties": [ + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } + ], "rollout_percentage": None, } ] } assert response_data["conditions"] == {"url": "https://app.posthog.com/notebooks"} assert response_data["questions"] == [ - {"type": "open", "question": "What would you want to improve from notebooks?"} + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } ] def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> None: @@ -140,7 +190,12 @@ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> No data={ "name": "Notebooks power users survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "linked_flag_id": notebooks_flag.id, "targeting_flag_filters": { "groups": [ @@ -148,7 +203,12 @@ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> No "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -170,7 +230,12 @@ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> No data={ "name": "Notebooks random survey", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "linked_flag_id": notebooks_flag.id, "conditions": {"url": "https://app.posthog.com/notebooks"}, }, @@ -187,7 +252,11 @@ def test_used_in_survey_is_populated_correctly_for_feature_flag_list(self) -> No # add 
another random feature flag self.client.post( f"/api/projects/{self.team.id}/feature_flags/", - data={"name": f"flag", "key": f"flag_0", "filters": {"groups": [{"rollout_percentage": 5}]}}, + data={ + "name": f"flag", + "key": f"flag_0", + "filters": {"groups": [{"rollout_percentage": 5}]}, + }, format="json", ).json() @@ -215,7 +284,12 @@ def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -246,7 +320,12 @@ def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "email", "value": ["max@posthog.com"], "operator": "exact", "type": "person"} + { + "key": "email", + "value": ["max@posthog.com"], + "operator": "exact", + "type": "person", + } ], } ] @@ -263,7 +342,14 @@ def test_updating_survey_with_targeting_creates_or_updates_targeting_flag(self): "groups": [ { "variant": None, - "properties": [{"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"}], + "properties": [ + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } + ], "rollout_percentage": None, } ] @@ -291,7 +377,12 @@ def test_updating_survey_to_remove_targeting_doesnt_delete_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -330,7 +421,12 @@ def test_updating_survey_to_send_none_targeting_deletes_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": 
"exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -369,7 +465,12 @@ def test_updating_survey_other_props_doesnt_delete_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -405,7 +506,12 @@ def test_survey_targeting_flag_validation(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -569,7 +675,12 @@ def test_deleting_survey_deletes_targeting_flag(self): "variant": None, "rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -593,7 +704,12 @@ def test_can_list_surveys(self): "name": "Notebooks power users survey", "type": "popover", "description": "Make notebooks better", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], }, ) @@ -610,7 +726,12 @@ def test_can_list_surveys(self): "name": "Notebooks power users survey", "description": "Make notebooks better", "type": "popover", - "questions": [{"type": "open", "question": "What would you want to improve from notebooks?"}], + "questions": [ + { + "type": "open", + "question": "What would you want to improve from notebooks?", + } + ], "appearance": None, "created_at": ANY, "created_by": ANY, @@ -637,7 +758,12 @@ def test_updating_survey_name_validates(self): "variant": None, 
"rollout_percentage": None, "properties": [ - {"key": "billing_plan", "value": ["cloud"], "operator": "exact", "type": "person"} + { + "key": "billing_plan", + "value": ["cloud"], + "operator": "exact", + "type": "person", + } ], } ] @@ -775,7 +901,11 @@ def test_create_basic_survey_question_validation(self): "description": "Get feedback on the new notebooks feature", "type": "popover", "questions": [ - {"type": "open", "question": "What up?", "description": "check?"}, + { + "type": "open", + "question": "What up?", + "description": "check?", + }, { "type": "link", "link": "bazinga.com", @@ -826,7 +956,11 @@ def test_update_basic_survey_question_validation(self): "description": "Get feedback on the new notebooks feature", "type": "popover", "questions": [ - {"type": "open", "question": "What up?", "description": "check?"}, + { + "type": "open", + "question": "What up?", + "description": "check?", + }, { "type": "link", "link": "bazinga.com", @@ -1069,3 +1203,33 @@ def test_responses_count_zero_responses(self): data = response.json() self.assertEqual(data, {}) + + +@pytest.mark.parametrize( + "test_input,expected", + [ + ( + """ +
+
+ Your Image +
+
+

Help us stay sharp.

+
+ """, + """ +
+
+ Your Image +
+
+

Help us stay sharp.

+
+
""", + ), + (""" """, """ """), + ], +) +def test_nh3_clean_configuration(test_input, expected): + assert nh3_clean_with_whitelist(test_input).replace(" ", "") == expected.replace(" ", "") diff --git a/posthog/api/test/test_team.py b/posthog/api/test/test_team.py index 052604d151276..297d23e54372c 100644 --- a/posthog/api/test/test_team.py +++ b/posthog/api/test/test_team.py @@ -54,7 +54,8 @@ def test_retrieve_project(self): get_instance_setting("PERSON_ON_EVENTS_ENABLED") or get_instance_setting("PERSON_ON_EVENTS_V2_ENABLED"), ) self.assertEqual( - response_data["groups_on_events_querying_enabled"], get_instance_setting("GROUPS_ON_EVENTS_ENABLED") + response_data["groups_on_events_querying_enabled"], + get_instance_setting("GROUPS_ON_EVENTS_ENABLED"), ) # TODO: These assertions will no longer make sense when we fully remove these attributes from the model @@ -188,13 +189,17 @@ def test_cant_update_project_from_another_org(self): def test_filter_permission(self): response = self.client.patch( - f"/api/projects/{self.team.id}/", {"test_account_filters": [{"key": "$current_url", "value": "test"}]} + f"/api/projects/{self.team.id}/", + {"test_account_filters": [{"key": "$current_url", "value": "test"}]}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual(response_data["name"], self.team.name) - self.assertEqual(response_data["test_account_filters"], [{"key": "$current_url", "value": "test"}]) + self.assertEqual( + response_data["test_account_filters"], + [{"key": "$current_url", "value": "test"}], + ) @patch("posthog.api.team.delete_bulky_postgres_data") @patch("posthoganalytics.capture") @@ -211,13 +216,18 @@ def test_delete_team_own_second(self, mock_capture: MagicMock, mock_delete_bulky self.assertEqual(response.status_code, 204) self.assertEqual(Team.objects.filter(organization=self.organization).count(), 1) self.assertEqual( - AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, 
key=str(team.id)).count(), 1 + AsyncDeletion.objects.filter(team_id=team.id, deletion_type=DeletionType.Team, key=str(team.id)).count(), + 1, ) mock_capture.assert_called_once_with( self.user.distinct_id, "team deleted", properties={}, - groups={"instance": ANY, "organization": str(self.organization.id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.organization.id), + "project": str(self.team.uuid), + }, ) mock_delete_bulky_postgres_data.assert_called_once_with(team_ids=[team.pk]) @@ -240,14 +250,23 @@ def test_delete_bulky_postgres_data(self): cohort = Cohort.objects.create(team=team, created_by=self.user, name="test") person = Person.objects.create( - team=team, distinct_ids=["example_id"], properties={"email": "tim@posthog.com", "team": "posthog"} + team=team, + distinct_ids=["example_id"], + properties={"email": "tim@posthog.com", "team": "posthog"}, ) person.add_distinct_id("test") flag = FeatureFlag.objects.create( - team=team, name="test", key="test", rollout_percentage=50, created_by=self.user + team=team, + name="test", + key="test", + rollout_percentage=50, + created_by=self.user, ) FeatureFlagHashKeyOverride.objects.create( - team_id=team.pk, person_id=person.id, feature_flag_key=flag.key, hash_key="test" + team_id=team.pk, + person_id=person.id, + feature_flag_key=flag.key, + hash_key="test", ) CohortPeople.objects.create(cohort_id=cohort.pk, person_id=person.pk) EarlyAccessFeature.objects.create( @@ -359,13 +378,16 @@ def test_update_timezone_remove_cache(self): data={"filters": {"events": json.dumps([{"id": "user signed up"}])}}, ) response = self.client.post( - f"/api/projects/{self.team.id}/insights/", data={"filters": {"events": json.dumps([{"id": "$pageview"}])}} + f"/api/projects/{self.team.id}/insights/", + data={"filters": {"events": json.dumps([{"id": "$pageview"}])}}, ).json() self.client.get( - f"/api/projects/{self.team.id}/insights/trend/", data={"events": json.dumps([{"id": "$pageview"}])} + 
f"/api/projects/{self.team.id}/insights/trend/", + data={"events": json.dumps([{"id": "$pageview"}])}, ) self.client.get( - f"/api/projects/{self.team.id}/insights/trend/", data={"events": json.dumps([{"id": "user signed up"}])} + f"/api/projects/{self.team.id}/insights/trend/", + data={"events": json.dumps([{"id": "user signed up"}])}, ) self.assertEqual(cache.get(response["filters_hash"])["result"][0]["count"], 0) @@ -412,7 +434,8 @@ def test_team_is_cached_on_create_and_update(self): self.assertEqual(cached_team.id, response.json()["id"]) response = self.client.patch( - f"/api/projects/{team_id}/", {"timezone": "Europe/Istanbul", "session_recording_opt_in": True} + f"/api/projects/{team_id}/", + {"timezone": "Europe/Istanbul", "session_recording_opt_in": True}, ) self.assertEqual(response.status_code, status.HTTP_200_OK) @@ -443,7 +466,10 @@ def test_turn_on_exception_autocapture(self): response = self.client.get("/api/projects/@current/") assert response.json()["autocapture_exceptions_opt_in"] is None - response = self.client.patch("/api/projects/@current/", {"autocapture_exceptions_opt_in": "Welwyn Garden City"}) + response = self.client.patch( + "/api/projects/@current/", + {"autocapture_exceptions_opt_in": "Welwyn Garden City"}, + ) assert response.status_code == status.HTTP_400_BAD_REQUEST assert response.json()["detail"] == "Must be a valid boolean." @@ -457,12 +483,16 @@ def test_configure_exception_autocapture_event_dropping(self): assert response.json()["autocapture_exceptions_errors_to_ignore"] is None response = self.client.patch( - "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}} + "/api/projects/@current/", + {"autocapture_exceptions_errors_to_ignore": {"wat": "am i"}}, ) assert response.status_code == status.HTTP_400_BAD_REQUEST assert response.json()["detail"] == "Must provide a list for field: autocapture_exceptions_errors_to_ignore." 
- response = self.client.patch("/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": [1, False]}) + response = self.client.patch( + "/api/projects/@current/", + {"autocapture_exceptions_errors_to_ignore": [1, False]}, + ) assert response.status_code == status.HTTP_400_BAD_REQUEST assert ( response.json()["detail"] @@ -470,7 +500,8 @@ def test_configure_exception_autocapture_event_dropping(self): ) response = self.client.patch( - "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": ["wat am i"]} + "/api/projects/@current/", + {"autocapture_exceptions_errors_to_ignore": ["wat am i"]}, ) assert response.status_code == status.HTTP_200_OK response = self.client.get("/api/projects/@current/") @@ -478,7 +509,8 @@ def test_configure_exception_autocapture_event_dropping(self): def test_configure_exception_autocapture_event_dropping_only_allows_simple_config(self): response = self.client.patch( - "/api/projects/@current/", {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]} + "/api/projects/@current/", + {"autocapture_exceptions_errors_to_ignore": ["abc" * 300]}, ) assert response.status_code == status.HTTP_400_BAD_REQUEST assert ( @@ -488,10 +520,30 @@ def test_configure_exception_autocapture_event_dropping_only_allows_simple_confi @parameterized.expand( [ - ["non numeric string", "Welwyn Garden City", "invalid_input", "A valid number is required."], - ["negative number", "-1", "min_value", "Ensure this value is greater than or equal to 0."], - ["greater than one", "1.5", "max_value", "Ensure this value is less than or equal to 1."], - ["too many digits", "0.534", "max_decimal_places", "Ensure that there are no more than 2 decimal places."], + [ + "non numeric string", + "Welwyn Garden City", + "invalid_input", + "A valid number is required.", + ], + [ + "negative number", + "-1", + "min_value", + "Ensure this value is greater than or equal to 0.", + ], + [ + "greater than one", + "1.5", + "max_value", + "Ensure this value is less 
than or equal to 1.", + ], + [ + "too many digits", + "0.534", + "max_decimal_places", + "Ensure that there are no more than 2 decimal places.", + ], ] ) def test_invalid_session_recording_sample_rates( @@ -508,9 +560,24 @@ def test_invalid_session_recording_sample_rates( @parameterized.expand( [ - ["non numeric string", "Trentham monkey forest", "invalid_input", "A valid integer is required."], - ["negative number", "-1", "min_value", "Ensure this value is greater than or equal to 0."], - ["greater than 15000", "15001", "max_value", "Ensure this value is less than or equal to 15000."], + [ + "non numeric string", + "Trentham monkey forest", + "invalid_input", + "A valid integer is required.", + ], + [ + "negative number", + "-1", + "min_value", + "Ensure this value is greater than or equal to 0.", + ], + [ + "greater than 15000", + "15001", + "max_value", + "Ensure this value is less than or equal to 15000.", + ], ["too many digits", "0.5", "invalid_input", "A valid integer is required."], ] ) @@ -518,7 +585,8 @@ def test_invalid_session_recording_minimum_duration( self, _name: str, provided_value: str, expected_code: str, expected_error: str ) -> None: response = self.client.patch( - "/api/projects/@current/", {"session_recording_minimum_duration_milliseconds": provided_value} + "/api/projects/@current/", + {"session_recording_minimum_duration_milliseconds": provided_value}, ) assert response.status_code == status.HTTP_400_BAD_REQUEST assert response.json() == { @@ -530,7 +598,12 @@ def test_invalid_session_recording_minimum_duration( @parameterized.expand( [ - ["string", "Marple bridge", "invalid_input", "Must provide a dictionary or None."], + [ + "string", + "Marple bridge", + "invalid_input", + "Must provide a dictionary or None.", + ], ["numeric", "-1", "invalid_input", "Must provide a dictionary or None."], [ "unexpected json - no id", @@ -566,11 +639,15 @@ def test_invalid_session_recording_linked_flag( def 
test_can_set_and_unset_session_recording_linked_flag(self) -> None: first_patch_response = self.client.patch( - "/api/projects/@current/", {"session_recording_linked_flag": {"id": 1, "key": "provided_value"}} + "/api/projects/@current/", + {"session_recording_linked_flag": {"id": 1, "key": "provided_value"}}, ) assert first_patch_response.status_code == status.HTTP_200_OK get_response = self.client.get("/api/projects/@current/") - assert get_response.json()["session_recording_linked_flag"] == {"id": 1, "key": "provided_value"} + assert get_response.json()["session_recording_linked_flag"] == { + "id": 1, + "key": "provided_value", + } response = self.client.patch("/api/projects/@current/", {"session_recording_linked_flag": None}) assert response.status_code == status.HTTP_200_OK @@ -585,7 +662,11 @@ def create_team(organization: Organization, name: str = "Test team") -> Team: with real world scenarios. """ return Team.objects.create( - organization=organization, name=name, ingested_event=True, completed_snippet_onboarding=True, is_demo=True + organization=organization, + name=name, + ingested_event=True, + completed_snippet_onboarding=True, + is_demo=True, ) diff --git a/posthog/api/test/test_uploaded_media.py b/posthog/api/test/test_uploaded_media.py index c611643cb2610..2a7a23407fef6 100644 --- a/posthog/api/test/test_uploaded_media.py +++ b/posthog/api/test/test_uploaded_media.py @@ -52,7 +52,9 @@ def test_can_upload_and_retrieve_a_file(self) -> None: with self.settings(OBJECT_STORAGE_ENABLED=True, OBJECT_STORAGE_MEDIA_UPLOADS_FOLDER=TEST_BUCKET): with open(get_path_to("a-small-but-valid.gif"), "rb") as image: response = self.client.post( - f"/api/projects/{self.team.id}/uploaded_media", {"image": image}, format="multipart" + f"/api/projects/{self.team.id}/uploaded_media", + {"image": image}, + format="multipart", ) self.assertEqual(response.status_code, status.HTTP_201_CREATED, response.json()) assert response.json()["name"] == "a-small-but-valid.gif" @@ -68,14 
+70,22 @@ def test_can_upload_and_retrieve_a_file(self) -> None: def test_rejects_non_image_file_type(self) -> None: fake_file = SimpleUploadedFile(name="test_image.jpg", content=b"a fake image", content_type="text/csv") response = self.client.post( - f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_file}, format="multipart" + f"/api/projects/{self.team.id}/uploaded_media", + {"image": fake_file}, + format="multipart", + ) + self.assertEqual( + response.status_code, + status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, + response.json(), ) - self.assertEqual(response.status_code, status.HTTP_415_UNSUPPORTED_MEDIA_TYPE, response.json()) def test_rejects_file_manually_crafted_to_start_with_image_magic_bytes(self) -> None: with open(get_path_to("file-masquerading-as-a.gif"), "rb") as image: response = self.client.post( - f"/api/projects/{self.team.id}/uploaded_media", {"image": image}, format="multipart" + f"/api/projects/{self.team.id}/uploaded_media", + {"image": image}, + format="multipart", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json()) @@ -88,10 +98,14 @@ def test_made_up_id_is_404(self) -> None: def test_rejects_too_large_file_type(self) -> None: four_megabytes_plus_a_little = b"1" * (4 * 1024 * 1024 + 1) fake_big_file = SimpleUploadedFile( - name="test_image.jpg", content=four_megabytes_plus_a_little, content_type="image/jpeg" + name="test_image.jpg", + content=four_megabytes_plus_a_little, + content_type="image/jpeg", ) response = self.client.post( - f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_big_file}, format="multipart" + f"/api/projects/{self.team.id}/uploaded_media", + {"image": fake_big_file}, + format="multipart", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json()) self.assertEqual(response.json()["detail"], "Uploaded media must be less than 4MB") @@ -100,7 +114,12 @@ def test_rejects_upload_when_object_storage_is_unavailable(self) -> None: with 
override_settings(OBJECT_STORAGE_ENABLED=False): fake_big_file = SimpleUploadedFile(name="test_image.jpg", content=b"", content_type="image/jpeg") response = self.client.post( - f"/api/projects/{self.team.id}/uploaded_media", {"image": fake_big_file}, format="multipart" + f"/api/projects/{self.team.id}/uploaded_media", + {"image": fake_big_file}, + format="multipart", ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, response.json()) - self.assertEqual(response.json()["detail"], "Object storage must be available to allow media uploads.") + self.assertEqual( + response.json()["detail"], + "Object storage must be available to allow media uploads.", + ) diff --git a/posthog/api/test/test_user.py b/posthog/api/test/test_user.py index 62dca8dad6277..c5d93bbef211e 100644 --- a/posthog/api/test/test_user.py +++ b/posthog/api/test/test_user.py @@ -206,7 +206,11 @@ def test_update_current_user(self, mock_capture, mock_identify_task): "partial_notification_settings", ] }, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) @patch("posthog.tasks.user_identify.identify_task") @@ -221,7 +225,12 @@ def test_set_scene_personalisation_for_user_dashboard_must_be_in_current_team( response = self.client.post( "/api/users/@me/scene_personalisation", # even if someone tries to send a different user or team they are ignored - {"user": 12345, "team": 12345, "dashboard": str(dashboard_one.id), "scene": "Person"}, + { + "user": 12345, + "team": 12345, + "dashboard": str(dashboard_one.id), + "scene": "Person", + }, ) assert response.status_code == status.HTTP_400_BAD_REQUEST @@ -320,7 +329,12 @@ def _assert_set_scene_choice( response = self.client.post( "/api/users/@me/scene_personalisation", # even if someone tries to send a different user or team they are ignored - {"user": 12345, "team": 12345, 
"dashboard": str(dashboard.id), "scene": scene}, + { + "user": 12345, + "team": 12345, + "dashboard": str(dashboard.id), + "scene": scene, + }, ) assert response.status_code == status.HTTP_200_OK response_data = response.json() @@ -354,7 +368,10 @@ def test_no_notifications_when_user_email_is_changed_and_email_not_available( @patch("posthog.tasks.email.send_email_change_emails.delay") @patch("posthog.tasks.email.send_email_verification.delay") def test_notifications_sent_when_user_email_is_changed_and_email_available( - self, mock_send_email_verification, mock_send_email_change_emails, mock_is_email_available + self, + mock_send_email_verification, + mock_send_email_change_emails, + mock_is_email_available, ): """Test that when a user updates their email, they receive a verification email before the switch actually happens.""" self.user.email = "alpha@example.com" @@ -381,7 +398,10 @@ def test_notifications_sent_when_user_email_is_changed_and_email_available( token = email_verification_token_generator.make_token(self.user) with freeze_time("2020-01-01T21:37:00+00:00"): - response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token}) + response = self.client.post( + f"/api/users/@me/verify_email/", + {"uuid": self.user.uuid, "token": token}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) self.user.refresh_from_db() @@ -389,7 +409,10 @@ def test_notifications_sent_when_user_email_is_changed_and_email_available( self.assertIsNone(self.user.pending_email) mock_is_email_available.assert_called_once() mock_send_email_change_emails.assert_called_once_with( - "2020-01-01T21:37:00+00:00", self.user.first_name, "alpha@example.com", "beta@example.com" + "2020-01-01T21:37:00+00:00", + self.user.first_name, + "alpha@example.com", + "beta@example.com", ) @patch("posthog.api.user.is_email_available", return_value=True) @@ -420,7 +443,8 @@ def test_cannot_upgrade_yourself_to_staff_user(self): 
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) self.assertEqual( - response.json(), self.permission_denied_response("You are not a staff user, contact your instance admin.") + response.json(), + self.permission_denied_response("You are not a staff user, contact your instance admin."), ) self.user.refresh_from_db() @@ -447,7 +471,11 @@ def test_can_update_current_organization(self, mock_capture, mock_identify): self.user.distinct_id, "user updated", properties={"updated_attrs": ["current_organization", "current_team"]}, - groups={"instance": ANY, "organization": str(self.new_org.id), "project": str(self.new_project.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.new_org.id), + "project": str(self.new_project.uuid), + }, ) @patch("posthog.tasks.user_identify.identify_task") @@ -471,7 +499,11 @@ def test_can_update_current_project(self, mock_capture, mock_identify): self.user.distinct_id, "user updated", properties={"updated_attrs": ["current_organization", "current_team"]}, - groups={"instance": ANY, "organization": str(self.new_org.id), "project": str(team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.new_org.id), + "project": str(team.uuid), + }, ) def test_cannot_set_mismatching_org_and_team(self): @@ -481,7 +513,11 @@ def test_cannot_set_mismatching_org_and_team(self): self.user.join(organization=org) response = self.client.patch( - "/api/users/@me/", {"set_current_team": team.id, "set_current_organization": self.organization.id} + "/api/users/@me/", + { + "set_current_team": team.id, + "set_current_organization": self.organization.id, + }, ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( @@ -583,7 +619,10 @@ def test_user_can_update_password(self, mock_capture, mock_identify): user = self._create_user("bob@posthog.com", password="A12345678") self.client.force_login(user) - response = self.client.patch("/api/users/@me/", {"current_password": "A12345678", "password": 
"a_new_password"}) + response = self.client.patch( + "/api/users/@me/", + {"current_password": "A12345678", "password": "a_new_password"}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() self.assertEqual(response_data["email"], "bob@posthog.com") @@ -602,7 +641,11 @@ def test_user_can_update_password(self, mock_capture, mock_identify): user.distinct_id, "user updated", properties={"updated_attrs": ["password"]}, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) # User can log in with new password @@ -616,7 +659,8 @@ def test_user_with_no_password_set_can_set_password(self, mock_capture, mock_ide self.client.force_login(user) response = self.client.patch( - "/api/users/@me/", {"password": "a_new_password"} # note we don't send current password + "/api/users/@me/", + {"password": "a_new_password"}, # note we don't send current password ) self.assertEqual(response.status_code, status.HTTP_200_OK) response_data = response.json() @@ -636,11 +680,18 @@ def test_user_with_no_password_set_can_set_password(self, mock_capture, mock_ide user.distinct_id, "user updated", properties={"updated_attrs": ["password"]}, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) # User can log in with new password - response = self.client.post("/api/login", {"email": "no_password@posthog.com", "password": "a_new_password"}) + response = self.client.post( + "/api/login", + {"email": "no_password@posthog.com", "password": "a_new_password"}, + ) self.assertEqual(response.status_code, status.HTTP_200_OK) def test_user_with_unusable_password_set_can_set_password(self): @@ -663,7 +714,10 @@ def 
test_user_with_unusable_password_set_can_set_password(self): @patch("posthog.tasks.user_identify.identify_task") @patch("posthoganalytics.capture") def test_cannot_update_to_insecure_password(self, mock_capture, mock_identify): - response = self.client.patch("/api/users/@me/", {"current_password": self.CONFIG_PASSWORD, "password": "123"}) + response = self.client.patch( + "/api/users/@me/", + {"current_password": self.CONFIG_PASSWORD, "password": "123"}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), @@ -740,7 +794,10 @@ def test_user_cannot_update_password_with_incorrect_current_password_and_ratelim for _ in range(7): response = self.client.patch("/api/users/@me/", {"current_password": "wrong", "password": "12345678"}) self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) - self.assertDictContainsSubset({"attr": None, "code": "throttled", "type": "throttled_error"}, response.json()) + self.assertDictContainsSubset( + {"attr": None, "code": "throttled", "type": "throttled_error"}, + response.json(), + ) # Password was not changed self.user.refresh_from_db() @@ -957,7 +1014,9 @@ def test_user_can_request_verification_email(self, mock_capture): html_message = mail.outbox[0].alternatives[0][0] # type: ignore self.validate_basic_html( - html_message, "https://my.posthog.net", preheader="Please follow the link inside to verify your account." 
+ html_message, + "https://my.posthog.net", + preheader="Please follow the link inside to verify your account.", ) link_index = html_message.find("https://my.posthog.net/verify_email") reset_link = html_message[link_index : html_message.find('"', link_index)] @@ -975,7 +1034,11 @@ def test_user_can_request_verification_email(self, mock_capture): self.user.distinct_id, "user logged in", properties={"social_provider": ""}, - groups={"instance": ANY, "organization": str(self.team.organization_id), "project": str(self.team.uuid)}, + groups={ + "instance": ANY, + "organization": str(self.team.organization_id), + "project": str(self.team.uuid), + }, ) mock_capture.assert_any_call( self.user.distinct_id, @@ -1003,14 +1066,18 @@ def test_cant_verify_more_than_six_times(self): for i in range(7): with self.settings(CELERY_TASK_ALWAYS_EAGER=True, SITE_URL="https://my.posthog.net"): - response = self.client.post(f"/api/users/@me/request_email_verification/", {"uuid": self.user.uuid}) + response = self.client.post( + f"/api/users/@me/request_email_verification/", + {"uuid": self.user.uuid}, + ) if i < 6: self.assertEqual(response.status_code, status.HTTP_200_OK) else: # Fourth request should fail self.assertEqual(response.status_code, status.HTTP_429_TOO_MANY_REQUESTS) self.assertDictContainsSubset( - {"attr": None, "code": "throttled", "type": "throttled_error"}, response.json() + {"attr": None, "code": "throttled", "type": "throttled_error"}, + response.json(), ) # Three emails should be sent, fourth should not @@ -1028,7 +1095,12 @@ def test_cant_validate_email_verification_token_without_a_token(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), - {"type": "validation_error", "code": "required", "detail": "This field is required.", "attr": "token"}, + { + "type": "validation_error", + "code": "required", + "detail": "This field is required.", + "attr": "token", + }, ) def 
test_invalid_verification_token_returns_error(self): @@ -1038,8 +1110,16 @@ def test_invalid_verification_token_returns_error(self): # tokens expire after one day expired_token = default_token_generator.make_token(self.user) - for token in [valid_token[:-1], "not_even_trying", self.user.uuid, expired_token]: - response = self.client.post(f"/api/users/@me/verify_email/", {"uuid": self.user.uuid, "token": token}) + for token in [ + valid_token[:-1], + "not_even_trying", + self.user.uuid, + expired_token, + ]: + response = self.client.post( + f"/api/users/@me/verify_email/", + {"uuid": self.user.uuid, "token": token}, + ) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual( response.json(), diff --git a/posthog/api/test/test_utils.py b/posthog/api/test/test_utils.py index 84a3d7315c220..8e0f08b009606 100644 --- a/posthog/api/test/test_utils.py +++ b/posthog/api/test/test_utils.py @@ -46,17 +46,24 @@ def test_format_paginated_url(self): "http://testserver/api/some_url?offset=10", ) self.assertEqual( - format_paginated_url(request("/api/some_url?offset=0"), offset=0, page_size=10), "api/some_url?offset=10" + format_paginated_url(request("/api/some_url?offset=0"), offset=0, page_size=10), + "api/some_url?offset=10", ) self.assertEqual( format_paginated_url( - request("/api/some_url?offset=0"), offset=0, page_size=10, mode=PaginationMode.previous + request("/api/some_url?offset=0"), + offset=0, + page_size=10, + mode=PaginationMode.previous, ), None, ) self.assertEqual( format_paginated_url( - request("/api/some_url?offset=0"), offset=20, page_size=10, mode=PaginationMode.previous + request("/api/some_url?offset=0"), + offset=20, + page_size=10, + mode=PaginationMode.previous, ), "api/some_url?offset=0", ) @@ -64,7 +71,11 @@ def test_format_paginated_url(self): def test_get_target_entity(self): request = lambda url: cast(Any, RequestFactory().get(url)) filter = Filter( - data={"entity_id": "$pageview", "entity_type": "events", "events": 
[{"id": "$pageview", "type": "events"}]} + data={ + "entity_id": "$pageview", + "entity_type": "events", + "events": [{"id": "$pageview", "type": "events"}], + } ) entity = get_target_entity(filter) @@ -90,10 +101,20 @@ def test_get_target_entity(self): assert entity.math == "unique_group" def test_check_definition_ids_inclusion_field_sql(self): + definition_ids = [ + "", + None, + '["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]', + ] - definition_ids = ["", None, '["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]'] - - expected_ids_list = [[], [], ["1fcefbef-7ea1-42fd-abca-4848b53133c0", "c8452399-8a10-4142-864d-6f2ca8c65154"]] + expected_ids_list = [ + [], + [], + [ + "1fcefbef-7ea1-42fd-abca-4848b53133c0", + "c8452399-8a10-4142-864d-6f2ca8c65154", + ], + ] for raw_ids, expected_ids in zip(definition_ids, expected_ids_list): ordered_expected_ids = list(set(expected_ids)) # type: ignore @@ -155,27 +176,43 @@ def test_raise_if_user_provided_url_unsafe(self): raise_if_user_provided_url_unsafe("https://1.1.1.1") # Safe, public IP self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("")) self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("@@@")) - self.assertRaisesMessage(ValueError, "No hostname", lambda: raise_if_user_provided_url_unsafe("posthog.com")) + self.assertRaisesMessage( + ValueError, + "No hostname", + lambda: raise_if_user_provided_url_unsafe("posthog.com"), + ) self.assertRaisesMessage( ValueError, "Scheme must be either HTTP or HTTPS", lambda: raise_if_user_provided_url_unsafe("ftp://posthog.com"), ) self.assertRaisesMessage( - ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://localhost") + ValueError, + "Internal hostname", + lambda: raise_if_user_provided_url_unsafe("http://localhost"), ) self.assertRaisesMessage( - ValueError, "Internal hostname", lambda: 
raise_if_user_provided_url_unsafe("http://192.168.0.5") + ValueError, + "Internal hostname", + lambda: raise_if_user_provided_url_unsafe("http://192.168.0.5"), ) self.assertRaisesMessage( - ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0") + ValueError, + "Internal hostname", + lambda: raise_if_user_provided_url_unsafe("http://0.0.0.0"), ) self.assertRaisesMessage( - ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24") + ValueError, + "Internal hostname", + lambda: raise_if_user_provided_url_unsafe("http://10.0.0.24"), ) self.assertRaisesMessage( - ValueError, "Internal hostname", lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21") + ValueError, + "Internal hostname", + lambda: raise_if_user_provided_url_unsafe("http://172.20.0.21"), ) self.assertRaisesMessage( - ValueError, "Invalid hostname", lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com") + ValueError, + "Invalid hostname", + lambda: raise_if_user_provided_url_unsafe("http://fgtggggzzggggfd.com"), ) # Non-existent diff --git a/posthog/api/uploaded_media.py b/posthog/api/uploaded_media.py index 5b0e68b5ab2e7..4893994ecdb55 100644 --- a/posthog/api/uploaded_media.py +++ b/posthog/api/uploaded_media.py @@ -7,7 +7,11 @@ from drf_spectacular.utils import extend_schema from PIL import Image from rest_framework import status, viewsets -from rest_framework.exceptions import APIException, UnsupportedMediaType, ValidationError +from rest_framework.exceptions import ( + APIException, + UnsupportedMediaType, + ValidationError, +) from rest_framework.parsers import FormParser, MultiPartParser from rest_framework.permissions import IsAuthenticatedOrReadOnly from rest_framework.response import Response @@ -16,7 +20,10 @@ from posthog.api.routing import StructuredViewSetMixin from posthog.models import UploadedMedia from posthog.models.uploaded_media import ObjectStorageUnavailable -from posthog.permissions 
import ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission +from posthog.permissions import ( + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, +) from posthog.storage import object_storage FOUR_MEGABYTES = 4 * 1024 * 1024 @@ -43,7 +50,12 @@ def validate_image_file(file: Optional[bytes], user: int) -> bool: im.close() return True except Exception as e: - logger.error("uploaded_media.image_verification_error", user=user, exception=e, exc_info=True) + logger.error( + "uploaded_media.image_verification_error", + user=user, + exception=e, + exc_info=True, + ) return False @@ -61,7 +73,10 @@ def download(request, *args, **kwargs) -> HttpResponse: file_bytes = object_storage.read_bytes(instance.media_location) - statsd.incr("uploaded_media.served", tags={"team_id": instance.team_id, "uuid": kwargs["image_uuid"]}) + statsd.incr( + "uploaded_media.served", + tags={"team_id": instance.team_id, "uuid": kwargs["image_uuid"]}, + ) return HttpResponse( file_bytes, @@ -109,15 +124,20 @@ def create(self, request, *args, **kwargs) -> Response: bytes_to_verify = object_storage.read_bytes(uploaded_media.media_location) if not validate_image_file(bytes_to_verify, user=request.user.id): statsd.incr( - "uploaded_media.image_failed_validation", tags={"file_name": file.name, "team": self.team_id} + "uploaded_media.image_failed_validation", + tags={"file_name": file.name, "team": self.team_id}, ) # TODO a batch process can delete media with no records in the DB or for deleted teams uploaded_media.delete() - raise ValidationError(code="invalid_image", detail="Uploaded media must be a valid image") + raise ValidationError( + code="invalid_image", + detail="Uploaded media must be a valid image", + ) headers = self.get_success_headers(uploaded_media.get_absolute_url()) statsd.incr( - "uploaded_media.uploaded", tags={"team_id": self.team.pk, "content_type": file.content_type} + "uploaded_media.uploaded", + tags={"team_id": self.team.pk, "content_type": 
file.content_type}, ) return Response( { @@ -134,7 +154,8 @@ def create(self, request, *args, **kwargs) -> Response: raise ValidationError(code="no-image-provided", detail="An image file must be provided") except ObjectStorageUnavailable: raise ValidationError( - code="object_storage_required", detail="Object storage must be available to allow media uploads." + code="object_storage_required", + detail="Object storage must be available to allow media uploads.", ) def get_success_headers(self, location: str) -> Dict: diff --git a/posthog/api/user.py b/posthog/api/user.py index 75276eca4f5ce..541a428074389 100644 --- a/posthog/api/user.py +++ b/posthog/api/user.py @@ -35,7 +35,11 @@ from posthog.auth import authenticate_secondarily from posthog.cloud_utils import is_cloud from posthog.email import is_email_available -from posthog.event_usage import report_user_logged_in, report_user_updated, report_user_verified_email +from posthog.event_usage import ( + report_user_logged_in, + report_user_updated, + report_user_verified_email, +) from posthog.models import Team, User, UserScenePersonalisation, Dashboard from posthog.models.organization import Organization from posthog.models.user import NOTIFICATION_DEFAULTS, Notifications @@ -110,7 +114,10 @@ class Meta: "has_seen_product_intro_for", "scene_personalisation", ] - extra_kwargs = {"date_joined": {"read_only": True}, "password": {"write_only": True}} + extra_kwargs = { + "date_joined": {"read_only": True}, + "password": {"write_only": True}, + } def get_has_password(self, instance: User) -> bool: return instance.has_usable_password() @@ -166,12 +173,14 @@ def validate_password_change( # usable (properly hashed) and that a password actually exists. 
if not current_password: raise serializers.ValidationError( - {"current_password": ["This field is required when updating your password."]}, code="required" + {"current_password": ["This field is required when updating your password."]}, + code="required", ) if not instance.check_password(current_password): raise serializers.ValidationError( - {"current_password": ["Your current password is incorrect."]}, code="incorrect_password" + {"current_password": ["Your current password is incorrect."]}, + code="incorrect_password", ) try: validate_password(password, instance) @@ -276,7 +285,12 @@ def save(self, **kwargs): ) -class UserViewSet(mixins.RetrieveModelMixin, mixins.UpdateModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet): +class UserViewSet( + mixins.RetrieveModelMixin, + mixins.UpdateModelMixin, + mixins.ListModelMixin, + viewsets.GenericViewSet, +): throttle_classes = [UserAuthenticationThrottle] serializer_class = UserSerializer permission_classes = [permissions.IsAuthenticated] @@ -305,7 +319,10 @@ def get_queryset(self): return queryset def get_serializer_context(self): - return {**super().get_serializer_context(), "user_permissions": UserPermissions(cast(User, self.request.user))} + return { + **super().get_serializer_context(), + "user_permissions": UserPermissions(cast(User, self.request.user)), + } @action(methods=["GET"], detail=True) def start_2fa_setup(self, request, **kwargs): @@ -319,7 +336,9 @@ def start_2fa_setup(self, request, **kwargs): @action(methods=["POST"], detail=True) def validate_2fa(self, request, **kwargs): form = TOTPDeviceForm( - request.session["django_two_factor-hex"], request.user, data={"token": request.data["token"]} + request.session["django_two_factor-hex"], + request.user, + data={"token": request.data["token"]}, ) if not form.is_valid(): raise serializers.ValidationError("Token is not valid", code="token_invalid") @@ -345,7 +364,8 @@ def verify_email(self, request, **kwargs): if not user or not 
EmailVerifier.check_token(user, token): raise serializers.ValidationError( - {"token": ["This verification token is invalid or has expired."]}, code="invalid_token" + {"token": ["This verification token is invalid or has expired."]}, + code="invalid_token", ) if user.pending_email: @@ -364,7 +384,10 @@ def verify_email(self, request, **kwargs): return Response({"success": True, "token": token}) @action( - methods=["POST"], detail=True, permission_classes=[AllowAny], throttle_classes=[UserEmailVerificationThrottle] + methods=["POST"], + detail=True, + permission_classes=[AllowAny], + throttle_classes=[UserEmailVerificationThrottle], ) def request_email_verification(self, request, **kwargs): uuid = request.data["uuid"] diff --git a/posthog/apps.py b/posthog/apps.py index 6ae001ccf93fc..3e6b2aaf76fee 100644 --- a/posthog/apps.py +++ b/posthog/apps.py @@ -7,7 +7,12 @@ from posthoganalytics.client import Client from posthog.settings import SELF_CAPTURE, SKIP_ASYNC_MIGRATIONS_SETUP -from posthog.utils import get_git_branch, get_git_commit, get_machine_id, get_self_capture_api_token +from posthog.utils import ( + get_git_branch, + get_git_commit, + get_machine_id, + get_self_capture_api_token, +) logger = structlog.get_logger(__name__) diff --git a/posthog/async_migrations/definition.py b/posthog/async_migrations/definition.py index 77e7261aab55e..859b8af08819d 100644 --- a/posthog/async_migrations/definition.py +++ b/posthog/async_migrations/definition.py @@ -19,7 +19,11 @@ class AsyncMigrationOperation: - def __init__(self, fn: Callable[[str], None], rollback_fn: Callable[[str], None] = lambda _: None): + def __init__( + self, + fn: Callable[[str], None], + rollback_fn: Callable[[str], None] = lambda _: None, + ): self.fn = fn # This should not be a long operation as it will be executed synchronously! 
@@ -55,7 +59,10 @@ def rollback_fn(self, query_id: str): self._execute_op(query_id, self.rollback, self.rollback_settings) def _execute_op(self, query_id: str, sql: str, settings: Optional[Dict]): - from posthog.async_migrations.utils import execute_op_clickhouse, execute_op_postgres + from posthog.async_migrations.utils import ( + execute_op_clickhouse, + execute_op_postgres, + ) if self.database == AnalyticsDBMS.CLICKHOUSE: execute_op_clickhouse( diff --git a/posthog/async_migrations/disk_util.py b/posthog/async_migrations/disk_util.py index 96ff6a383e7fc..ac7398a2c3e2f 100644 --- a/posthog/async_migrations/disk_util.py +++ b/posthog/async_migrations/disk_util.py @@ -27,10 +27,17 @@ def analyze_enough_disk_space_free_for_table(table_name: str, required_ratio: fl total_disk_space - (free_disk_space - %(ratio)s * table_size) AS required, formatReadableSize(required) """, - {"database": CLICKHOUSE_DATABASE, "table_name": table_name, "ratio": required_ratio}, + { + "database": CLICKHOUSE_DATABASE, + "table_name": table_name, + "ratio": required_ratio, + }, )[0] if current_ratio >= required_ratio: return (True, None) else: - return (False, f"Upgrade your ClickHouse storage to at least {required_space_pretty}.") + return ( + False, + f"Upgrade your ClickHouse storage to at least {required_space_pretty}.", + ) diff --git a/posthog/async_migrations/examples/example.py b/posthog/async_migrations/examples/example.py index 1c0143744d796..c079c5ca6e504 100644 --- a/posthog/async_migrations/examples/example.py +++ b/posthog/async_migrations/examples/example.py @@ -28,7 +28,6 @@ def example_rollback_fn(uuid: str): class Migration(AsyncMigrationDefinition): - description = "An example async migration." 
posthog_min_version = "1.29.0" diff --git a/posthog/async_migrations/examples/test_migration.py b/posthog/async_migrations/examples/test_migration.py index 4b85264004865..7b3516c8ced91 100644 --- a/posthog/async_migrations/examples/test_migration.py +++ b/posthog/async_migrations/examples/test_migration.py @@ -26,7 +26,6 @@ def side_effect_rollback(self, _): class Migration(AsyncMigrationDefinition): - # For testing only!! fail = False error_message = "Healthcheck failed" diff --git a/posthog/async_migrations/examples/test_with_rollback_exception.py b/posthog/async_migrations/examples/test_with_rollback_exception.py index 75371d4cfba37..b17f52391c9d6 100644 --- a/posthog/async_migrations/examples/test_with_rollback_exception.py +++ b/posthog/async_migrations/examples/test_with_rollback_exception.py @@ -1,4 +1,7 @@ -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperation, +) # For testing purposes @@ -8,7 +11,6 @@ def raise_exception_fn(_): class Migration(AsyncMigrationDefinition): - # For testing only!! description = "Another example async migration that's less realistic and used in tests." diff --git a/posthog/async_migrations/migrations/0001_events_sample_by.py b/posthog/async_migrations/migrations/0001_events_sample_by.py index 6a27833d38e0d..4098fd38f32a1 100644 --- a/posthog/async_migrations/migrations/0001_events_sample_by.py +++ b/posthog/async_migrations/migrations/0001_events_sample_by.py @@ -1,6 +1,9 @@ from typing import List -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperation, +) """ Nooping this migration for future compatibility. Superseded by 0002_events_sample_by. 
@@ -10,7 +13,6 @@ class Migration(AsyncMigrationDefinition): - description = "Test migration" posthog_max_version = "1.33.9" diff --git a/posthog/async_migrations/migrations/0002_events_sample_by.py b/posthog/async_migrations/migrations/0002_events_sample_by.py index c4d7ca9181f67..7ad43de2934a3 100644 --- a/posthog/async_migrations/migrations/0002_events_sample_by.py +++ b/posthog/async_migrations/migrations/0002_events_sample_by.py @@ -59,7 +59,6 @@ def generate_insert_into_op(partition_gte: int, partition_lt=None) -> AsyncMigra class Migration(AsyncMigrationDefinition): - description = ( "Schema change to the events table ensuring our SAMPLE BY clause is compatible with ClickHouse >=21.7.0." ) @@ -138,7 +137,10 @@ def operations(self): ), AsyncMigrationOperation( fn=lambda query_id: run_optimize_table( - unique_name="0002_events_sample_by", query_id=query_id, table_name=EVENTS_TABLE_NAME, final=True + unique_name="0002_events_sample_by", + query_id=query_id, + table_name=EVENTS_TABLE_NAME, + final=True, ) ), ] diff --git a/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py b/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py index 3cb5f123c5124..ba1d6dd917292 100644 --- a/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py +++ b/posthog/async_migrations/migrations/0003_fill_person_distinct_id2.py @@ -1,6 +1,9 @@ from functools import cached_property -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperationSQL, +) from posthog.client import sync_execute from posthog.constants import AnalyticsDBMS from posthog.settings import CLICKHOUSE_DATABASE @@ -29,7 +32,6 @@ class Migration(AsyncMigrationDefinition): - description = "Set up person_distinct_id2 table, speeding up person-related queries." 
depends_on = "0002_events_sample_by" diff --git a/posthog/async_migrations/migrations/0004_replicated_schema.py b/posthog/async_migrations/migrations/0004_replicated_schema.py index 18f54a315e621..9bdbdc4ebe5f3 100644 --- a/posthog/async_migrations/migrations/0004_replicated_schema.py +++ b/posthog/async_migrations/migrations/0004_replicated_schema.py @@ -49,7 +49,6 @@ class Migration(AsyncMigrationDefinition): - description = "Replace tables with replicated counterparts" depends_on = "0003_fill_person_distinct_id2" diff --git a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py index 69e38de0a4bf8..276d6c54abed3 100644 --- a/posthog/async_migrations/migrations/0005_person_replacing_by_version.py +++ b/posthog/async_migrations/migrations/0005_person_replacing_by_version.py @@ -160,9 +160,14 @@ def operations(self): ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}' """, ), - AsyncMigrationOperationSQL(database=AnalyticsDBMS.CLICKHOUSE, sql=PERSONS_TABLE_MV_SQL, rollback=None), + AsyncMigrationOperationSQL( + database=AnalyticsDBMS.CLICKHOUSE, + sql=PERSONS_TABLE_MV_SQL, + rollback=None, + ), AsyncMigrationOperation( - fn=self.copy_persons_from_postgres, rollback_fn=lambda _: self.unset_highwatermark() + fn=self.copy_persons_from_postgres, + rollback_fn=lambda _: self.unset_highwatermark(), ), ] @@ -195,10 +200,16 @@ def copy_persons_from_postgres(self, query_id: str): should_continue = self._copy_batch_from_postgres(query_id) self.unset_highwatermark() run_optimize_table( - unique_name="0005_person_replacing_by_version", query_id=query_id, table_name=PERSON_TABLE, final=True + unique_name="0005_person_replacing_by_version", + query_id=query_id, + table_name=PERSON_TABLE, + final=True, ) except Exception as err: - logger.warn("Re-copying persons from postgres failed. Marking async migration as complete.", error=err) + logger.warn( + "Re-copying persons from postgres failed. 
Marking async migration as complete.", + error=err, + ) capture_exception(err) def _copy_batch_from_postgres(self, query_id: str) -> bool: diff --git a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py index f83b509698394..62f539f333481 100644 --- a/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/migrations/0006_persons_and_groups_on_events_backfill.py @@ -1,6 +1,9 @@ from typing import List -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperation, +) """ Nooping this migration for future compatibility. Superseded by 0007_persons_and_groups_on_events_backfill. @@ -10,7 +13,6 @@ class Migration(AsyncMigrationDefinition): - description = "No-op migration" posthog_max_version = "1.41.99" diff --git a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py index de0911a54a41a..9b140eedf8a1c 100644 --- a/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/migrations/0007_persons_and_groups_on_events_backfill.py @@ -10,7 +10,11 @@ AsyncMigrationOperationSQL, ) from posthog.async_migrations.disk_util import analyze_enough_disk_space_free_for_table -from posthog.async_migrations.utils import execute_op_clickhouse, run_optimize_table, sleep_until_finished +from posthog.async_migrations.utils import ( + execute_op_clickhouse, + run_optimize_table, + sleep_until_finished, +) from posthog.client import sync_execute from posthog.models.event.sql import EVENTS_DATA_TABLE from posthog.utils import str_to_bool @@ -74,16 +78,36 @@ class Migration(AsyncMigrationDefinition): posthog_max_version 
= "1.41.99" parameters = { - "PERSON_DICT_CACHE_SIZE": (5000000, "ClickHouse cache size (in rows) for persons data.", int), + "PERSON_DICT_CACHE_SIZE": ( + 5000000, + "ClickHouse cache size (in rows) for persons data.", + int, + ), "PERSON_DISTINCT_ID_DICT_CACHE_SIZE": ( 5000000, "ClickHouse cache size (in rows) for person distinct id data.", int, ), - "GROUPS_DICT_CACHE_SIZE": (1000000, "ClickHouse cache size (in rows) for groups data.", int), - "RUN_DATA_VALIDATION_POSTCHECK": ("True", "Whether to run a postcheck validating the backfilled data.", str), - "TIMESTAMP_LOWER_BOUND": ("2020-01-01", "Timestamp lower bound for events to backfill", str), - "TIMESTAMP_UPPER_BOUND": ("2024-01-01", "Timestamp upper bound for events to backfill", str), + "GROUPS_DICT_CACHE_SIZE": ( + 1000000, + "ClickHouse cache size (in rows) for groups data.", + int, + ), + "RUN_DATA_VALIDATION_POSTCHECK": ( + "True", + "Whether to run a postcheck validating the backfilled data.", + str, + ), + "TIMESTAMP_LOWER_BOUND": ( + "2020-01-01", + "Timestamp lower bound for events to backfill", + str, + ), + "TIMESTAMP_UPPER_BOUND": ( + "2024-01-01", + "Timestamp upper bound for events to backfill", + str, + ), "TEAM_ID": ( None, "The team_id of team to run backfill for. 
If unset the backfill will run for all teams.", @@ -95,7 +119,6 @@ def precheck(self): return analyze_enough_disk_space_free_for_table(EVENTS_DATA_TABLE(), required_ratio=2.0) def is_required(self) -> bool: - # we don't check groupX_created_at columns as they are 0 by default rows_to_backfill_check = sync_execute( """ @@ -435,8 +458,9 @@ def _run_backfill_mutation(self, query_id): ) def _create_dictionaries(self, query_id): - execute_op_clickhouse( - f""" + ( + execute_op_clickhouse( + f""" CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.person_dict {{on_cluster_clause}} ( team_id Int64, @@ -449,12 +473,14 @@ def _create_dictionaries(self, query_id): LAYOUT(complex_key_cache(size_in_cells %(cache_size)s max_threads_for_updates 6 allow_read_expired_keys 1)) Lifetime(60000) """, - {"cache_size": self.get_parameter("PERSON_DICT_CACHE_SIZE")}, - per_shard=True, - query_id=query_id, - ), - execute_op_clickhouse( - f""" + {"cache_size": self.get_parameter("PERSON_DICT_CACHE_SIZE")}, + per_shard=True, + query_id=query_id, + ), + ) + ( + execute_op_clickhouse( + f""" CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.person_distinct_id2_dict {{on_cluster_clause}} ( team_id Int64, @@ -466,10 +492,11 @@ def _create_dictionaries(self, query_id): LAYOUT(complex_key_cache(size_in_cells %(cache_size)s max_threads_for_updates 6 allow_read_expired_keys 1)) Lifetime(60000) """, - {"cache_size": self.get_parameter("PERSON_DISTINCT_ID_DICT_CACHE_SIZE")}, - per_shard=True, - query_id=query_id, - ), + {"cache_size": self.get_parameter("PERSON_DISTINCT_ID_DICT_CACHE_SIZE")}, + per_shard=True, + query_id=query_id, + ), + ) execute_op_clickhouse( f""" CREATE DICTIONARY IF NOT EXISTS {settings.CLICKHOUSE_DATABASE}.groups_dict {{on_cluster_clause}} @@ -501,7 +528,10 @@ def _count_running_mutations(self): FROM clusterAllReplicas(%(cluster)s, system, 'mutations') WHERE not is_done AND command LIKE %(pattern)s """, - {"cluster": settings.CLICKHOUSE_CLUSTER, 
"pattern": "%person_created_at = toDateTime(0)%"}, + { + "cluster": settings.CLICKHOUSE_CLUSTER, + "pattern": "%person_created_at = toDateTime(0)%", + }, )[0][0] def _clear_temporary_tables(self, query_id): diff --git a/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py b/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py index b8a3fa65bfdf5..20d81d063cd26 100644 --- a/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py +++ b/posthog/async_migrations/migrations/0008_speed_up_kafka_timestamp_filters.py @@ -3,7 +3,10 @@ import structlog from django.conf import settings -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperationSQL, +) from posthog.client import sync_execute from posthog.constants import AnalyticsDBMS from posthog.version_requirement import ServiceVersionRequirement diff --git a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py index ea21377f19b69..9b4c64c9af869 100644 --- a/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py +++ b/posthog/async_migrations/migrations/0009_minmax_indexes_for_materialized_columns.py @@ -1,6 +1,9 @@ from typing import List -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperation, +) class Migration(AsyncMigrationDefinition): diff --git a/posthog/async_migrations/migrations/0010_move_old_partitions.py b/posthog/async_migrations/migrations/0010_move_old_partitions.py index 990e339bff581..8097224014f00 100644 --- a/posthog/async_migrations/migrations/0010_move_old_partitions.py +++ 
b/posthog/async_migrations/migrations/0010_move_old_partitions.py @@ -2,7 +2,10 @@ import structlog -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperationSQL +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperationSQL, +) from posthog.client import sync_execute from posthog.constants import AnalyticsDBMS from posthog.version_requirement import ServiceVersionRequirement @@ -20,9 +23,21 @@ class Migration(AsyncMigrationDefinition): posthog_max_version = "1.49.99" parameters = { - "OLDEST_PARTITION_TO_KEEP": ("200001", "ID of the oldest partition to keep", str), - "NEWEST_PARTITION_TO_KEEP": ("202308", "ID of the newest partition to keep", str), - "OPTIMIZE_TABLE": (False, "Optimize sharded_events table after moving partitions?", bool), + "OLDEST_PARTITION_TO_KEEP": ( + "200001", + "ID of the oldest partition to keep", + str, + ), + "NEWEST_PARTITION_TO_KEEP": ( + "202308", + "ID of the newest partition to keep", + str, + ), + "OPTIMIZE_TABLE": ( + False, + "Optimize sharded_events table after moving partitions?", + bool, + ), } service_version_requirements = [ServiceVersionRequirement(service="clickhouse", supported_version=">=22.3.0")] diff --git a/posthog/async_migrations/runner.py b/posthog/async_migrations/runner.py index 931ee5d67a232..78f2afcf21201 100644 --- a/posthog/async_migrations/runner.py +++ b/posthog/async_migrations/runner.py @@ -19,7 +19,11 @@ trigger_migration, update_async_migration, ) -from posthog.models.async_migration import AsyncMigration, MigrationStatus, get_all_running_async_migrations +from posthog.models.async_migration import ( + AsyncMigration, + MigrationStatus, + get_all_running_async_migrations, +) from posthog.models.instance_setting import get_instance_setting from posthog.models.utils import UUIDT from posthog.version_requirement import ServiceVersionRequirement @@ -33,7 +37,9 @@ def start_async_migration( - migration_name: str, 
ignore_posthog_version=False, migration_definition: Optional[AsyncMigrationDefinition] = None + migration_name: str, + ignore_posthog_version=False, + migration_definition: Optional[AsyncMigrationDefinition] = None, ) -> bool: """ Performs some basic checks to ensure the migration can indeed run, and then kickstarts the chain of operations @@ -63,7 +69,10 @@ def start_async_migration( if not ( ignore_posthog_version - or is_posthog_version_compatible(migration_instance.posthog_min_version, migration_instance.posthog_max_version) + or is_posthog_version_compatible( + migration_instance.posthog_min_version, + migration_instance.posthog_max_version, + ) ): process_error( migration_instance, @@ -102,7 +111,9 @@ def start_async_migration( ok, error = run_migration_precheck(migration_instance) if not ok: process_error( - migration_instance, f"Migration precheck failed with error:{error}", status=MigrationStatus.FailedAtStartup + migration_instance, + f"Migration precheck failed with error:{error}", + status=MigrationStatus.FailedAtStartup, ) return False @@ -245,7 +256,10 @@ def attempt_migration_rollback(migration_instance: AsyncMigration): return update_async_migration( - migration_instance=migration_instance, status=MigrationStatus.RolledBack, progress=0, current_operation_index=0 + migration_instance=migration_instance, + status=MigrationStatus.RolledBack, + progress=0, + current_operation_index=0, ) diff --git a/posthog/async_migrations/setup.py b/posthog/async_migrations/setup.py index 30a74b0acf76c..fff7205a4c8e0 100644 --- a/posthog/async_migrations/setup.py +++ b/posthog/async_migrations/setup.py @@ -6,7 +6,10 @@ from posthog.async_migrations.definition import AsyncMigrationDefinition from posthog.constants import FROZEN_POSTHOG_VERSION -from posthog.models.async_migration import AsyncMigration, get_all_completed_async_migrations +from posthog.models.async_migration import ( + AsyncMigration, + get_all_completed_async_migrations, +) from 
posthog.models.instance_setting import get_instance_setting from posthog.settings import TEST diff --git a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py index 27c660a8c749d..4e6588ad45920 100644 --- a/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py +++ b/posthog/async_migrations/test/test_0007_persons_and_groups_on_events_backfill.py @@ -5,14 +5,25 @@ import pytest from posthog.async_migrations.runner import start_async_migration -from posthog.async_migrations.setup import get_async_migration_definition, setup_async_migrations +from posthog.async_migrations.setup import ( + get_async_migration_definition, + setup_async_migrations, +) from posthog.async_migrations.test.util import AsyncMigrationBaseTest from posthog.client import query_with_columns, sync_execute from posthog.models import Person -from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus +from posthog.models.async_migration import ( + AsyncMigration, + AsyncMigrationError, + MigrationStatus, +) from posthog.models.event.util import create_event from posthog.models.group.util import create_group -from posthog.models.person.util import create_person, create_person_distinct_id, delete_person +from posthog.models.person.util import ( + create_person, + create_person_distinct_id, + delete_person, +) from posthog.models.utils import UUIDT from posthog.test.base import ClickhouseTestMixin, run_clickhouse_statement_in_parallel @@ -269,7 +280,12 @@ def test_data_copy_groups(self): team=self.team, distinct_id="1", event="$pageview", - properties={"$group_0": "org:7", "$group_1": "77", "$group_2": "77", "$group_3": "77"}, + properties={ + "$group_0": "org:7", + "$group_1": "77", + "$group_2": "77", + "$group_3": "77", + }, ) # we need to also create person data so the backfill postcheck does not fail @@ -327,7 +343,10 @@ def 
test_rollback(self): migration_successful = run_migration() self.assertFalse(migration_successful) - self.assertEqual(AsyncMigration.objects.get(name=MIGRATION_NAME).status, MigrationStatus.RolledBack) + self.assertEqual( + AsyncMigration.objects.get(name=MIGRATION_NAME).status, + MigrationStatus.RolledBack, + ) MIGRATION_DEFINITION.operations[-1].fn = old_fn @@ -553,7 +572,8 @@ def test_check_person_data_failure(self): # Test that we fail the postcheck with the right message when 3 out of 101 events is incomplete (~2%) with self.assertRaisesRegex( - Exception, "Backfill did not work succesfully. ~2% of events did not get the correct data for persons." + Exception, + "Backfill did not work succesfully. ~2% of events did not get the correct data for persons.", ): MIGRATION_DEFINITION._check_person_data() # type: ignore diff --git a/posthog/async_migrations/test/test_0010_move_old_partitions.py b/posthog/async_migrations/test/test_0010_move_old_partitions.py index 272b51c1735c8..3cc21d3b67a58 100644 --- a/posthog/async_migrations/test/test_0010_move_old_partitions.py +++ b/posthog/async_migrations/test/test_0010_move_old_partitions.py @@ -1,7 +1,10 @@ import pytest from posthog.async_migrations.runner import start_async_migration -from posthog.async_migrations.setup import get_async_migration_definition, setup_async_migrations +from posthog.async_migrations.setup import ( + get_async_migration_definition, + setup_async_migrations, +) from posthog.async_migrations.test.util import AsyncMigrationBaseTest from posthog.models.event.util import create_event from posthog.models.utils import UUIDT @@ -24,18 +27,38 @@ def run_migration(): class Test0010MoveOldPartitions(AsyncMigrationBaseTest): def setUp(self): - MIGRATION_DEFINITION.parameters["OLDEST_PARTITION_TO_KEEP"] = ("202301", "", str) - MIGRATION_DEFINITION.parameters["NEWEST_PARTITION_TO_KEEP"] = ("202302", "", str) + MIGRATION_DEFINITION.parameters["OLDEST_PARTITION_TO_KEEP"] = ( + "202301", + "", + str, + ) + 
MIGRATION_DEFINITION.parameters["NEWEST_PARTITION_TO_KEEP"] = ( + "202302", + "", + str, + ) MIGRATION_DEFINITION.parameters["OPTIMIZE_TABLE"] = (False, "", bool) create_event( - event_uuid=uuid1, team=self.team, distinct_id="1", event="$pageview", timestamp="1900-01-02T00:00:00Z" + event_uuid=uuid1, + team=self.team, + distinct_id="1", + event="$pageview", + timestamp="1900-01-02T00:00:00Z", ) create_event( - event_uuid=uuid2, team=self.team, distinct_id="1", event="$pageview", timestamp="2022-02-02T00:00:00Z" + event_uuid=uuid2, + team=self.team, + distinct_id="1", + event="$pageview", + timestamp="2022-02-02T00:00:00Z", ) create_event( - event_uuid=uuid3, team=self.team, distinct_id="1", event="$pageview", timestamp="2045-02-02T00:00:00Z" + event_uuid=uuid3, + team=self.team, + distinct_id="1", + event="$pageview", + timestamp="2045-02-02T00:00:00Z", ) super().setUp() @@ -44,7 +67,6 @@ def tearDown(self): super().tearDown() def test_completes_successfully(self): - self.assertTrue(run_migration()) # create table + 3 move operations diff --git a/posthog/async_migrations/test/test_definition.py b/posthog/async_migrations/test/test_definition.py index 24c556841649a..1acdfa758499b 100644 --- a/posthog/async_migrations/test/test_definition.py +++ b/posthog/async_migrations/test/test_definition.py @@ -1,7 +1,10 @@ import pytest from infi.clickhouse_orm.utils import import_submodules -from posthog.async_migrations.definition import AsyncMigrationDefinition, AsyncMigrationOperation +from posthog.async_migrations.definition import ( + AsyncMigrationDefinition, + AsyncMigrationOperation, +) from posthog.async_migrations.setup import ( ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH, get_async_migration_definition, @@ -16,7 +19,10 @@ class TestAsyncMigrationDefinition(BaseTest): def test_get_async_migration_definition(self): - from posthog.async_migrations.examples.example import example_fn, example_rollback_fn + from posthog.async_migrations.examples.example import ( + example_fn, + 
example_rollback_fn, + ) modules = import_submodules(ASYNC_MIGRATIONS_EXAMPLE_MODULE_PATH) example_migration = modules["example"].Migration("example") @@ -28,7 +34,12 @@ def test_get_async_migration_definition(self): self.assertEqual(example_migration.posthog_max_version, "1.30.0") self.assertEqual(example_migration.operations[-1].fn, example_fn) self.assertEqual(example_migration.operations[-1].rollback_fn, example_rollback_fn) - self.assertTrue(isinstance(example_migration.service_version_requirements[0], ServiceVersionRequirement)) + self.assertTrue( + isinstance( + example_migration.service_version_requirements[0], + ServiceVersionRequirement, + ) + ) def test_get_migration_instance_and_parameters(self): setup_async_migrations(ignore_posthog_version=True) @@ -41,7 +52,8 @@ def test_get_migration_instance_and_parameters(self): self.assertEqual(definition.migration_instance(), instance) self.assertEqual( - definition.get_parameter("PERSON_DICT_CACHE_SIZE"), definition.parameters["PERSON_DICT_CACHE_SIZE"][0] + definition.get_parameter("PERSON_DICT_CACHE_SIZE"), + definition.parameters["PERSON_DICT_CACHE_SIZE"][0], ) instance.parameters = {"PERSON_DICT_CACHE_SIZE": 123} diff --git a/posthog/async_migrations/test/test_migrations_not_required.py b/posthog/async_migrations/test/test_migrations_not_required.py index 76f9de401e097..9665f534ac81f 100644 --- a/posthog/async_migrations/test/test_migrations_not_required.py +++ b/posthog/async_migrations/test/test_migrations_not_required.py @@ -7,6 +7,7 @@ pytestmark = pytest.mark.async_migrations + # Async migrations are data migrations aimed at getting users from an old schema to a new schema # Fresh installs should have the new schema, however. 
So check that async migrations are being # written correctly such that this is the case @@ -19,4 +20,7 @@ def setUp(self): def test_async_migrations_not_required_on_fresh_instances(self): for name, migration in ALL_ASYNC_MIGRATIONS.items(): - self.assertFalse(migration.is_required(), f"Async migration {name} is_required returned True") + self.assertFalse( + migration.is_required(), + f"Async migration {name} is_required returned True", + ) diff --git a/posthog/async_migrations/test/test_runner.py b/posthog/async_migrations/test/test_runner.py index f433a5e36be3e..9c4a7b1fe5ea5 100644 --- a/posthog/async_migrations/test/test_runner.py +++ b/posthog/async_migrations/test/test_runner.py @@ -9,9 +9,16 @@ run_async_migration_next_op, start_async_migration, ) -from posthog.async_migrations.test.util import AsyncMigrationBaseTest, create_async_migration +from posthog.async_migrations.test.util import ( + AsyncMigrationBaseTest, + create_async_migration, +) from posthog.async_migrations.utils import update_async_migration -from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus +from posthog.models.async_migration import ( + AsyncMigration, + AsyncMigrationError, + MigrationStatus, +) from posthog.models.utils import UUIDT pytestmark = pytest.mark.async_migrations @@ -52,7 +59,6 @@ def test_run_migration_in_full(self): self.assertEqual(self.migration.sec.side_effect_rollback_count, 0) def test_rollback_migration(self): - self.migration.sec.reset_count() migration_successful = start_async_migration("test_migration") diff --git a/posthog/async_migrations/test/test_utils.py b/posthog/async_migrations/test/test_utils.py index f2e45a24ab042..da01ec9dda54d 100644 --- a/posthog/async_migrations/test/test_utils.py +++ b/posthog/async_migrations/test/test_utils.py @@ -4,7 +4,10 @@ import pytest from posthog.async_migrations.definition import AsyncMigrationOperationSQL -from posthog.async_migrations.test.util import AsyncMigrationBaseTest, 
create_async_migration +from posthog.async_migrations.test.util import ( + AsyncMigrationBaseTest, + create_async_migration, +) from posthog.async_migrations.utils import ( complete_migration, execute_on_each_shard, diff --git a/posthog/async_migrations/utils.py b/posthog/async_migrations/utils.py index efa70424e8846..20ad64cf7d75b 100644 --- a/posthog/async_migrations/utils.py +++ b/posthog/async_migrations/utils.py @@ -15,7 +15,11 @@ from posthog.clickhouse.client.connection import make_ch_pool from posthog.clickhouse.query_tagging import reset_query_tags, tag_queries from posthog.email import is_email_available -from posthog.models.async_migration import AsyncMigration, AsyncMigrationError, MigrationStatus +from posthog.models.async_migration import ( + AsyncMigration, + AsyncMigrationError, + MigrationStatus, +) from posthog.models.instance_setting import get_instance_setting from posthog.models.user import User from posthog.settings import ( @@ -154,7 +158,13 @@ def sleep_until_finished(name, is_running: Callable[[], bool]) -> None: def run_optimize_table( - *, unique_name: str, query_id: str, table_name: str, deduplicate=False, final=False, per_shard=False + *, + unique_name: str, + query_id: str, + table_name: str, + deduplicate=False, + final=False, + per_shard=False, ): """ Runs the passed OPTIMIZE TABLE query. @@ -163,7 +173,10 @@ def run_optimize_table( we'll wait for that to complete first. 
""" if not TEST and _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0: - sleep_until_finished(unique_name, lambda: _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0) + sleep_until_finished( + unique_name, + lambda: _get_number_running_on_cluster(f"%%optimize:{unique_name}%%") > 0, + ) else: final_clause = "FINAL" if final else "" deduplicate_clause = "DEDUPLICATE" if deduplicate else "" @@ -175,7 +188,10 @@ def run_optimize_table( execute_op_clickhouse( sql, query_id=f"optimize:{unique_name}/{query_id}", - settings={"max_execution_time": ASYNC_MIGRATIONS_DEFAULT_TIMEOUT_SECONDS, "mutations_sync": 2}, + settings={ + "max_execution_time": ASYNC_MIGRATIONS_DEFAULT_TIMEOUT_SECONDS, + "mutations_sync": 2, + }, per_shard=per_shard, ) @@ -213,7 +229,9 @@ def process_error( from posthog.tasks.email import send_async_migration_errored_email send_async_migration_errored_email.delay( - migration_key=migration_instance.name, time=now().isoformat(), error=error + migration_key=migration_instance.name, + time=now().isoformat(), + error=error, ) if ( @@ -237,7 +255,9 @@ def trigger_migration(migration_instance: AsyncMigration, fresh_start: bool = Tr def force_stop_migration( - migration_instance: AsyncMigration, error: str = "Force stopped by user", rollback: bool = True + migration_instance: AsyncMigration, + error: str = "Force stopped by user", + rollback: bool = True, ): """ In theory this is dangerous, as it can cause another task to be lost @@ -299,7 +319,10 @@ def mark_async_migration_as_running(migration_instance: AsyncMigration) -> bool: # update to running iff the state was Starting (ui triggered) or NotStarted (api triggered) with transaction.atomic(): instance = AsyncMigration.objects.select_for_update().get(pk=migration_instance.pk) - if instance.status not in [MigrationStatus.Starting, MigrationStatus.NotStarted]: + if instance.status not in [ + MigrationStatus.Starting, + MigrationStatus.NotStarted, + ]: return False instance.status 
= MigrationStatus.Running instance.current_query_id = "" diff --git a/posthog/batch_exports/http.py b/posthog/batch_exports/http.py index 06fb9866ac0e9..aa71cc9060a13 100644 --- a/posthog/batch_exports/http.py +++ b/posthog/batch_exports/http.py @@ -88,7 +88,11 @@ class RunsCursorPagination(CursorPagination): class BatchExportRunViewSet(StructuredViewSetMixin, viewsets.ReadOnlyModelViewSet): queryset = BatchExportRun.objects.all() - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] serializer_class = BatchExportRunSerializer pagination_class = RunsCursorPagination @@ -98,7 +102,8 @@ def get_queryset(self, date_range: tuple[dt.datetime, dt.datetime] | None = None if date_range: return self.queryset.filter( - batch_export_id=self.kwargs["parent_lookup_batch_export_id"], created_at__range=date_range + batch_export_id=self.kwargs["parent_lookup_batch_export_id"], + created_at__range=date_range, ).order_by("-created_at") else: return self.queryset.filter(batch_export_id=self.kwargs["parent_lookup_batch_export_id"]).order_by( @@ -178,7 +183,10 @@ def create(self, validated_data: dict) -> BatchExport: str(team.uuid), groups={"organization": str(team.organization.id)}, group_properties={ - "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at} + "organization": { + "id": str(team.organization.id), + "created_at": team.organization.created_at, + } }, send_feature_flag_events=False, ): @@ -216,7 +224,11 @@ def update(self, batch_export: BatchExport, validated_data: dict) -> BatchExport class BatchExportViewSet(StructuredViewSetMixin, viewsets.ModelViewSet): queryset = BatchExport.objects.all() - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + 
ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] serializer_class = BatchExportSerializer def get_queryset(self): @@ -319,7 +331,11 @@ class Meta: class BatchExportLogViewSet(StructuredViewSetMixin, mixins.ListModelMixin, viewsets.GenericViewSet): - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] serializer_class = BatchExportLogEntrySerializer def get_queryset(self): diff --git a/posthog/batch_exports/models.py b/posthog/batch_exports/models.py index 633163b831238..dc86c2ce7286a 100644 --- a/posthog/batch_exports/models.py +++ b/posthog/batch_exports/models.py @@ -38,7 +38,9 @@ class Destination(models.TextChoices): } type: models.CharField = models.CharField( - choices=Destination.choices, max_length=64, help_text="A choice of supported BatchExportDestination types." + choices=Destination.choices, + max_length=64, + help_text="A choice of supported BatchExportDestination types.", ) config: models.JSONField = models.JSONField( default=dict, @@ -46,10 +48,12 @@ class Destination(models.TextChoices): help_text="A JSON field to store all configuration parameters required to access a BatchExportDestination.", ) created_at: models.DateTimeField = models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportDestination was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExportDestination was created.", ) last_updated_at: models.DateTimeField = models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportDestination was last updated." 
+ auto_now=True, + help_text="The timestamp at which this BatchExportDestination was last updated.", ) @@ -74,7 +78,9 @@ class Status(models.TextChoices): STARTING = "Starting" batch_export = models.ForeignKey( - "BatchExport", on_delete=models.CASCADE, help_text="The BatchExport this run belongs to." + "BatchExport", + on_delete=models.CASCADE, + help_text="The BatchExport this run belongs to.", ) status: models.CharField = models.CharField( choices=Status.choices, max_length=64, help_text="The status of this run." @@ -89,17 +95,25 @@ class Status(models.TextChoices): data_interval_end: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.") cursor: models.TextField = models.TextField(null=True, help_text="An opaque cursor that may be used to resume.") created_at: models.DateTimeField = models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportRun was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExportRun was created.", ) finished_at: models.DateTimeField = models.DateTimeField( - null=True, help_text="The timestamp at which this BatchExportRun finished, successfully or not." + null=True, + help_text="The timestamp at which this BatchExportRun finished, successfully or not.", ) last_updated_at: models.DateTimeField = models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated." 
+ auto_now=True, + help_text="The timestamp at which this BatchExportRun was last updated.", ) -BATCH_EXPORT_INTERVALS = [("hour", "hour"), ("day", "day"), ("week", "week"), ("every 5 minutes", "every 5 minutes")] +BATCH_EXPORT_INTERVALS = [ + ("hour", "hour"), + ("day", "day"), + ("week", "week"), + ("every 5 minutes", "every 5 minutes"), +] class BatchExport(UUIDModel): @@ -113,7 +127,9 @@ class BatchExport(UUIDModel): team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") name: models.TextField = models.TextField(help_text="A human-readable name for this BatchExport.") destination: models.ForeignKey = models.ForeignKey( - "BatchExportDestination", on_delete=models.CASCADE, help_text="The destination to export data to." + "BatchExportDestination", + on_delete=models.CASCADE, + help_text="The destination to export data to.", ) interval = models.CharField( max_length=64, @@ -125,19 +141,27 @@ class BatchExport(UUIDModel): paused = models.BooleanField(default=False, help_text="Whether this BatchExport is paused or not.") deleted = models.BooleanField(default=False, help_text="Whether this BatchExport is deleted or not.") created_at: models.DateTimeField = models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExport was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExport was created.", ) last_updated_at: models.DateTimeField = models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExport was last updated." + auto_now=True, + help_text="The timestamp at which this BatchExport was last updated.", ) last_paused_at: models.DateTimeField = models.DateTimeField( - null=True, default=None, help_text="The timestamp at which this BatchExport was last paused." 
+ null=True, + default=None, + help_text="The timestamp at which this BatchExport was last paused.", ) start_at: models.DateTimeField = models.DateTimeField( - null=True, default=None, help_text="Time before which any Batch Export runs won't be triggered." + null=True, + default=None, + help_text="Time before which any Batch Export runs won't be triggered.", ) end_at: models.DateTimeField = models.DateTimeField( - null=True, default=None, help_text="Time after which any Batch Export runs won't be triggered." + null=True, + default=None, + help_text="Time after which any Batch Export runs won't be triggered.", ) @property @@ -244,7 +268,9 @@ class Status(models.TextChoices): team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, help_text="The team this belongs to.") batch_export = models.ForeignKey( - "BatchExport", on_delete=models.CASCADE, help_text="The BatchExport this backfill belongs to." + "BatchExport", + on_delete=models.CASCADE, + help_text="The BatchExport this backfill belongs to.", ) start_at: models.DateTimeField = models.DateTimeField(help_text="The start of the data interval.") end_at: models.DateTimeField = models.DateTimeField(help_text="The end of the data interval.") @@ -252,11 +278,14 @@ class Status(models.TextChoices): choices=Status.choices, max_length=64, help_text="The status of this backfill." ) created_at: models.DateTimeField = models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportBackfill was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExportBackfill was created.", ) finished_at: models.DateTimeField = models.DateTimeField( - null=True, help_text="The timestamp at which this BatchExportBackfill finished, successfully or not." 
+ null=True, + help_text="The timestamp at which this BatchExportBackfill finished, successfully or not.", ) last_updated_at: models.DateTimeField = models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportBackfill was last updated." + auto_now=True, + help_text="The timestamp at which this BatchExportBackfill was last updated.", ) diff --git a/posthog/batch_exports/service.py b/posthog/batch_exports/service.py index ffe5ee0b692d9..008096d5f50bc 100644 --- a/posthog/batch_exports/service.py +++ b/posthog/batch_exports/service.py @@ -186,7 +186,10 @@ async def pause_schedule(temporal: Client, schedule_id: str, note: str | None = def unpause_batch_export( - temporal: Client, batch_export_id: str, note: str | None = None, backfill: bool = False + temporal: Client, + batch_export_id: str, + note: str | None = None, + backfill: bool = False, ) -> None: """Pause this BatchExport. diff --git a/posthog/caching/calculate_results.py b/posthog/caching/calculate_results.py index 73fc11de7b54f..be11c4ffe48b5 100644 --- a/posthog/caching/calculate_results.py +++ b/posthog/caching/calculate_results.py @@ -16,12 +16,23 @@ ) from posthog.decorators import CacheType from posthog.logging.timing import timed -from posthog.models import Dashboard, DashboardTile, EventDefinition, Filter, Insight, RetentionFilter, Team +from posthog.models import ( + Dashboard, + DashboardTile, + EventDefinition, + Filter, + Insight, + RetentionFilter, + Team, +) from posthog.models.filters import PathFilter from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.filters.utils import get_filter from posthog.models.insight import generate_insight_cache_key -from posthog.queries.funnels import ClickhouseFunnelTimeToConvert, ClickhouseFunnelTrends +from posthog.queries.funnels import ( + ClickhouseFunnelTimeToConvert, + ClickhouseFunnelTrends, +) from posthog.queries.funnels.utils import get_funnel_order_class from posthog.queries.paths 
import Paths from posthog.queries.retention import Retention @@ -225,6 +236,10 @@ def _events_from_filter(filter: Union[RetentionFilter, StickinessFilter, PathFil return [] except Exception as exc: - logger.error("update_cache_item.could_not_list_events_from_filter", exc=exc, exc_info=True) + logger.error( + "update_cache_item.could_not_list_events_from_filter", + exc=exc, + exc_info=True, + ) capture_exception(exc) return [] diff --git a/posthog/caching/fetch_from_cache.py b/posthog/caching/fetch_from_cache.py index b507cdf4d277e..d7c0e5e03e50a 100644 --- a/posthog/caching/fetch_from_cache.py +++ b/posthog/caching/fetch_from_cache.py @@ -5,14 +5,19 @@ from django.utils.timezone import now from prometheus_client import Counter -from posthog.caching.calculate_results import calculate_cache_key, calculate_result_by_insight +from posthog.caching.calculate_results import ( + calculate_cache_key, + calculate_result_by_insight, +) from posthog.caching.insight_cache import update_cached_state from posthog.models import DashboardTile, Insight from posthog.models.dashboard import Dashboard from posthog.utils import get_safe_cache insight_cache_read_counter = Counter( - "posthog_cloud_insight_cache_read", "A read from the redis insight cache", labelnames=["result"] + "posthog_cloud_insight_cache_read", + "A read from the redis insight cache", + labelnames=["result"], ) @@ -72,7 +77,9 @@ def fetch_cached_insight_result(target: Union[Insight, DashboardTile], refresh_f def synchronously_update_cache( - insight: Insight, dashboard: Optional[Dashboard], refresh_frequency: Optional[timedelta] = None + insight: Insight, + dashboard: Optional[Dashboard], + refresh_frequency: Optional[timedelta] = None, ) -> InsightResult: cache_key, cache_type, result = calculate_result_by_insight(team=insight.team, insight=insight, dashboard=dashboard) timestamp = now() diff --git a/posthog/caching/insight_cache.py b/posthog/caching/insight_cache.py index b019fd774df39..d1214c3a67a98 100644 --- 
a/posthog/caching/insight_cache.py +++ b/posthog/caching/insight_cache.py @@ -42,7 +42,9 @@ def schedule_cache_updates(): if len(representative_by_cache_key) > 0: logger.warn( - "Scheduled caches to be updated", candidates=len(to_update), tasks_created=len(representative_by_cache_key) + "Scheduled caches to be updated", + candidates=len(to_update), + tasks_created=len(representative_by_cache_key), ) else: logger.warn("No caches were found to be updated") @@ -120,7 +122,12 @@ def update_cache(caching_state_id: UUID): statsd.incr("caching_state_update_success") statsd.incr("caching_state_update_rows_updated", rows_updated) statsd.timing("caching_state_update_success_timing", duration) - logger.warn("Re-calculated insight cache", rows_updated=rows_updated, duration=duration, **metadata) + logger.warn( + "Re-calculated insight cache", + rows_updated=rows_updated, + duration=duration, + **metadata, + ) else: logger.warn( "Failed to re-calculate insight cache", @@ -137,11 +144,18 @@ def update_cache(caching_state_id: UUID): update_cache_task.apply_async(args=[caching_state_id], countdown=timedelta(minutes=10).total_seconds()) InsightCachingState.objects.filter(pk=caching_state.pk).update( - refresh_attempt=caching_state.refresh_attempt + 1, last_refresh_queued_at=now() + refresh_attempt=caching_state.refresh_attempt + 1, + last_refresh_queued_at=now(), ) -def update_cached_state(team_id: int, cache_key: str, timestamp: datetime, result: Any, ttl: Optional[int] = None): +def update_cached_state( + team_id: int, + cache_key: str, + timestamp: datetime, + result: Any, + ttl: Optional[int] = None, +): cache.set(cache_key, result, ttl if ttl is not None else settings.CACHED_RESULTS_TTL) insight_cache_write_counter.inc() @@ -156,6 +170,9 @@ def _extract_insight_dashboard(caching_state: InsightCachingState) -> Tuple[Insi if caching_state.dashboard_tile is not None: assert caching_state.dashboard_tile.insight is not None - return caching_state.dashboard_tile.insight, 
caching_state.dashboard_tile.dashboard + return ( + caching_state.dashboard_tile.insight, + caching_state.dashboard_tile.dashboard, + ) else: return caching_state.insight, None diff --git a/posthog/caching/insight_caching_state.py b/posthog/caching/insight_caching_state.py index fc87915c25a98..a8ae36c14f05a 100644 --- a/posthog/caching/insight_caching_state.py +++ b/posthog/caching/insight_caching_state.py @@ -20,6 +20,7 @@ logger = structlog.get_logger(__name__) + # :TODO: Make these configurable class TargetCacheAge(Enum): NO_CACHING = None @@ -95,7 +96,12 @@ def sync_insight_cache_states(): tiles = ( DashboardTile.objects.all() .filter(insight__isnull=False) - .prefetch_related("dashboard", "dashboard__sharingconfiguration_set", "insight", "insight__team") + .prefetch_related( + "dashboard", + "dashboard__sharingconfiguration_set", + "insight", + "insight__team", + ) .order_by("pk") ) @@ -105,7 +111,10 @@ def sync_insight_cache_states(): def upsert( - team: Team, target: Union[DashboardTile, Insight], lazy_loader: Optional[LazyLoader] = None, execute=True + team: Team, + target: Union[DashboardTile, Insight], + lazy_loader: Optional[LazyLoader] = None, + execute=True, ) -> Optional[InsightCachingState]: lazy_loader = lazy_loader or LazyLoader() cache_key = calculate_cache_key(target) @@ -129,7 +138,11 @@ def upsert( return model -def sync_insight_caching_state(team_id: int, insight_id: Optional[int] = None, dashboard_tile_id: Optional[int] = None): +def sync_insight_caching_state( + team_id: int, + insight_id: Optional[int] = None, + dashboard_tile_id: Optional[int] = None, +): try: team = Team.objects.get(pk=team_id) item: Optional[DashboardTile | Insight] = None diff --git a/posthog/caching/insights_api.py b/posthog/caching/insights_api.py index 399e889cf18af..1b07f37bc7804 100644 --- a/posthog/caching/insights_api.py +++ b/posthog/caching/insights_api.py @@ -5,7 +5,10 @@ import zoneinfo from rest_framework import request -from 
posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME, calculate_cache_key +from posthog.caching.calculate_results import ( + CLICKHOUSE_MAX_EXECUTION_TIME, + calculate_cache_key, +) from posthog.caching.insight_caching_state import InsightCachingState from posthog.models import DashboardTile, Insight from posthog.models.filters.utils import get_filter @@ -25,7 +28,11 @@ def should_refresh_insight( - insight: Insight, dashboard_tile: Optional[DashboardTile], *, request: request.Request, is_shared=False + insight: Insight, + dashboard_tile: Optional[DashboardTile], + *, + request: request.Request, + is_shared=False, ) -> Tuple[bool, timedelta]: """Return whether the insight should be refreshed now, and what's the minimum wait time between refreshes. diff --git a/posthog/caching/test/test_fetch_from_cache.py b/posthog/caching/test/test_fetch_from_cache.py index 4ffd44d24eca3..6ac03f0a0e451 100644 --- a/posthog/caching/test/test_fetch_from_cache.py +++ b/posthog/caching/test/test_fetch_from_cache.py @@ -11,7 +11,13 @@ ) from posthog.decorators import CacheType from posthog.models import Insight -from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, _create_insight, flush_persons_and_events +from posthog.test.base import ( + BaseTest, + ClickhouseTestMixin, + _create_event, + _create_insight, + flush_persons_and_events, +) from posthog.utils import get_safe_cache @@ -20,12 +26,24 @@ class TestFetchFromCache(ClickhouseTestMixin, BaseTest): def setUp(self): super().setUp() - _create_event(team=self.team, event="$pageview", distinct_id="1", properties={"prop": "val"}) - _create_event(team=self.team, event="$pageview", distinct_id="2", properties={"prop": "another_val"}) + _create_event( + team=self.team, + event="$pageview", + distinct_id="1", + properties={"prop": "val"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="2", + properties={"prop": "another_val"}, + ) flush_persons_and_events() insight, 
dashboard, dashboard_tile = _create_insight( - self.team, {"events": [{"id": "$pageview"}], "properties": []}, {"properties": [{}]} + self.team, + {"events": [{"id": "$pageview"}], "properties": []}, + {"properties": [{}]}, ) self.dashboard = dashboard self.insight = insight diff --git a/posthog/caching/test/test_insight_cache.py b/posthog/caching/test/test_insight_cache.py index 99ff1d8ca63d2..1dbe0b5ce2dc1 100644 --- a/posthog/caching/test/test_insight_cache.py +++ b/posthog/caching/test/test_insight_cache.py @@ -7,10 +7,19 @@ from freezegun import freeze_time from posthog.caching.calculate_results import get_cache_type -from posthog.caching.insight_cache import fetch_states_in_need_of_updating, schedule_cache_updates, update_cache +from posthog.caching.insight_cache import ( + fetch_states_in_need_of_updating, + schedule_cache_updates, + update_cache, +) from posthog.caching.insight_caching_state import upsert from posthog.caching.test.test_insight_caching_state import create_insight, filter_dict -from posthog.constants import INSIGHT_PATHS, INSIGHT_RETENTION, INSIGHT_STICKINESS, INSIGHT_TRENDS +from posthog.constants import ( + INSIGHT_PATHS, + INSIGHT_RETENTION, + INSIGHT_STICKINESS, + INSIGHT_TRENDS, +) from posthog.decorators import CacheType from posthog.models import Filter, InsightCachingState, RetentionFilter, Team, User from posthog.models.filters import PathFilter @@ -64,7 +73,10 @@ def test_schedule_cache_updates(update_cache_task, team: Team, user: User): schedule_cache_updates() - assert update_cache_task.delay.call_args_list == [call(caching_state1.pk), call(caching_state3.pk)] + assert update_cache_task.delay.call_args_list == [ + call(caching_state1.pk), + call(caching_state3.pk), + ] last_refresh_queued_at = InsightCachingState.objects.filter(team=team).values_list( "last_refresh_queued_at", flat=True @@ -81,9 +93,27 @@ def test_schedule_cache_updates(update_cache_task, team: Team, user: User): ({"last_refresh": None}, 1), ({"target_cache_age": 
None, "last_refresh": None}, 0), ({"target_cache_age": timedelta(days=1), "last_refresh": timedelta(days=2)}, 1), - ({"target_cache_age": timedelta(days=1), "last_refresh": timedelta(hours=23)}, 0), - ({"target_cache_age": timedelta(days=1), "last_refresh_queued_at": timedelta(hours=23)}, 1), - ({"target_cache_age": timedelta(days=1), "last_refresh_queued_at": timedelta(minutes=5)}, 0), + ( + { + "target_cache_age": timedelta(days=1), + "last_refresh": timedelta(hours=23), + }, + 0, + ), + ( + { + "target_cache_age": timedelta(days=1), + "last_refresh_queued_at": timedelta(hours=23), + }, + 1, + ), + ( + { + "target_cache_age": timedelta(days=1), + "last_refresh_queued_at": timedelta(minutes=5), + }, + 0, + ), ({"refresh_attempt": 2}, 1), ({"refresh_attempt": 3}, 0), ], @@ -137,7 +167,11 @@ def test_update_cache_updates_identical_cache_keys(team: Team, user: User, cache @patch("posthog.celery.update_cache_task") @patch("posthog.caching.insight_cache.calculate_result_by_insight") def test_update_cache_when_calculation_fails( - spy_calculate_result_by_insight, spy_update_cache_task, team: Team, user: User, cache + spy_calculate_result_by_insight, + spy_update_cache_task, + team: Team, + user: User, + cache, ): caching_state = create_insight_caching_state(team, user, refresh_attempt=1) spy_calculate_result_by_insight.side_effect = Exception() @@ -180,6 +214,11 @@ def test_update_cache_when_recently_refreshed(spy_calculate_result_by_insight, t ], ) @pytest.mark.django_db -def test_get_cache_type(team: Team, filter_model: Callable, insight_type: str, expected_cache_type: CacheType) -> None: +def test_get_cache_type( + team: Team, + filter_model: Callable, + insight_type: str, + expected_cache_type: CacheType, +) -> None: filter = filter_model(data={"insight": insight_type}, team=team) assert get_cache_type(filter) == expected_cache_type diff --git a/posthog/caching/test/test_insight_caching_state.py b/posthog/caching/test/test_insight_caching_state.py index 
9b6f60aecf1c9..03a3652555202 100644 --- a/posthog/caching/test/test_insight_caching_state.py +++ b/posthog/caching/test/test_insight_caching_state.py @@ -52,7 +52,12 @@ def create_insight( insight = Insight.objects.create(team=team, filters=filters, deleted=deleted, query=query) if viewed_at_delta is not None: - InsightViewed.objects.create(insight=insight, last_viewed_at=now() - viewed_at_delta, user=user, team=team) + InsightViewed.objects.create( + insight=insight, + last_viewed_at=now() - viewed_at_delta, + user=user, + team=team, + ) if is_shared: SharingConfiguration.objects.create(team=team, insight=insight, enabled=True) @@ -78,7 +83,9 @@ def create_tile( mock_active_teams.return_value = {team.pk} if team_should_be_active else set() dashboard = Dashboard.objects.create( - team=team, last_accessed_at=now() - viewed_at_delta if viewed_at_delta else None, deleted=dashboard_deleted + team=team, + last_accessed_at=now() - viewed_at_delta if viewed_at_delta else None, + deleted=dashboard_deleted, ) if on_home_dashboard: @@ -109,36 +116,91 @@ def create_tile( [ # Insight test cases pytest.param(create_insight, {}, TargetCacheAge.MID_PRIORITY, id="shared insight (base)"), - pytest.param(create_insight, {"is_shared": False}, TargetCacheAge.NO_CACHING, id="not shared insight"), pytest.param( - create_insight, {"team_should_be_active": False}, TargetCacheAge.NO_CACHING, id="insight with inactive team" + create_insight, + {"is_shared": False}, + TargetCacheAge.NO_CACHING, + id="not shared insight", + ), + pytest.param( + create_insight, + {"team_should_be_active": False}, + TargetCacheAge.NO_CACHING, + id="insight with inactive team", + ), + pytest.param( + create_insight, + {"viewed_at_delta": None}, + TargetCacheAge.NO_CACHING, + id="insight never viewed", ), - pytest.param(create_insight, {"viewed_at_delta": None}, TargetCacheAge.NO_CACHING, id="insight never viewed"), pytest.param( create_insight, {"viewed_at_delta": timedelta(weeks=100)}, 
TargetCacheAge.NO_CACHING, id="insight viewed long time ago", ), - pytest.param(create_insight, {"filters": {}}, TargetCacheAge.NO_CACHING, id="insight with no filters"), - pytest.param(create_insight, {"deleted": True}, TargetCacheAge.NO_CACHING, id="deleted insight"), + pytest.param( + create_insight, + {"filters": {}}, + TargetCacheAge.NO_CACHING, + id="insight with no filters", + ), + pytest.param( + create_insight, + {"deleted": True}, + TargetCacheAge.NO_CACHING, + id="deleted insight", + ), # Dashboard tile test cases pytest.param(create_tile, {}, TargetCacheAge.LOW_PRIORITY, id="shared tile (base)"), - pytest.param(create_tile, {"is_dashboard_shared": False}, TargetCacheAge.NO_CACHING, id="not shared tile"), pytest.param( - create_tile, {"team_should_be_active": False}, TargetCacheAge.NO_CACHING, id="tile with inactive team" + create_tile, + {"is_dashboard_shared": False}, + TargetCacheAge.NO_CACHING, + id="not shared tile", + ), + pytest.param( + create_tile, + {"team_should_be_active": False}, + TargetCacheAge.NO_CACHING, + id="tile with inactive team", + ), + pytest.param( + create_tile, + {"dashboard_tile_deleted": True}, + TargetCacheAge.NO_CACHING, + id="deleted tile", + ), + pytest.param( + create_tile, + {"dashboard_deleted": True}, + TargetCacheAge.NO_CACHING, + id="tile with deleted dashboard", ), - pytest.param(create_tile, {"dashboard_tile_deleted": True}, TargetCacheAge.NO_CACHING, id="deleted tile"), pytest.param( - create_tile, {"dashboard_deleted": True}, TargetCacheAge.NO_CACHING, id="tile with deleted dashboard" + create_tile, + {"insight_deleted": True}, + TargetCacheAge.NO_CACHING, + id="tile with deleted insight", ), - pytest.param(create_tile, {"insight_deleted": True}, TargetCacheAge.NO_CACHING, id="tile with deleted insight"), pytest.param( - create_tile, {"insight_filters": {}}, TargetCacheAge.NO_CACHING, id="tile with insight with no filters" + create_tile, + {"insight_filters": {}}, + TargetCacheAge.NO_CACHING, + id="tile with 
insight with no filters", ), - pytest.param(create_tile, {"text_tile": True}, TargetCacheAge.NO_CACHING, id="tile with text"), pytest.param( - create_tile, {"on_home_dashboard": True}, TargetCacheAge.HIGH_PRIORITY, id="tile on home dashboard" + create_tile, + {"text_tile": True}, + TargetCacheAge.NO_CACHING, + id="tile with text", + ), + pytest.param( + create_tile, + {"on_home_dashboard": True}, + TargetCacheAge.HIGH_PRIORITY, + id="tile on home dashboard", ), pytest.param( create_tile, @@ -165,7 +227,10 @@ def create_tile( id="recently viewed tile (2)", ), pytest.param( - create_tile, {"viewed_at_delta": timedelta(days=20)}, TargetCacheAge.LOW_PRIORITY, id="tile viewed ages ago" + create_tile, + {"viewed_at_delta": timedelta(days=20)}, + TargetCacheAge.LOW_PRIORITY, + id="tile viewed ages ago", ), # cacheable types of query pytest.param( @@ -182,13 +247,19 @@ def create_tile( ), pytest.param( create_insight, - {"query": {"kind": "TimeToSeeDataSessionsQuery"}, "viewed_at_delta": timedelta(days=1)}, + { + "query": {"kind": "TimeToSeeDataSessionsQuery"}, + "viewed_at_delta": timedelta(days=1), + }, TargetCacheAge.MID_PRIORITY, id="insight with TimeToSeeDataSessionsQuery query viewed recently", ), pytest.param( create_insight, - {"query": {"kind": "TimeToSeeDataQuery"}, "viewed_at_delta": timedelta(days=1)}, + { + "query": {"kind": "TimeToSeeDataQuery"}, + "viewed_at_delta": timedelta(days=1), + }, TargetCacheAge.MID_PRIORITY, id="insight with TimeToSeeDataQuery query viewed recently", ), @@ -220,7 +291,12 @@ def create_tile( @pytest.mark.django_db @patch("posthog.caching.insight_caching_state.active_teams") def test_calculate_target_age( - mock_active_teams, team: Team, user: User, create_item, create_item_kw: Dict, expected_target_age: TargetCacheAge + mock_active_teams, + team: Team, + user: User, + create_item, + create_item_kw: Dict, + expected_target_age: TargetCacheAge, ): item = cast( Union[Insight, DashboardTile], diff --git 
a/posthog/caching/test/test_should_refresh_insight.py b/posthog/caching/test/test_should_refresh_insight.py index 12fb385ef2926..9c8932cd61e19 100644 --- a/posthog/caching/test/test_should_refresh_insight.py +++ b/posthog/caching/test/test_should_refresh_insight.py @@ -8,7 +8,10 @@ from rest_framework.request import Request from posthog.caching.calculate_results import CLICKHOUSE_MAX_EXECUTION_TIME from posthog.caching.insight_caching_state import InsightCachingState -from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, should_refresh_insight +from posthog.caching.insights_api import ( + BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, + should_refresh_insight, +) from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_insight @@ -97,7 +100,9 @@ def test_insights_with_hour_intervals_can_be_refreshed_more_often(self): @freeze_time("2012-01-14T03:21:34.000Z") def test_insights_with_ranges_lower_than_7_days_can_be_refreshed_more_often(self): insight, _, _ = _create_insight( - self.team, {"events": [{"id": "$pageview"}], "interval": "day", "date_from": "-3d"}, {} + self.team, + {"events": [{"id": "$pageview"}], "interval": "day", "date_from": "-3d"}, + {}, ) should_refresh_now, refresh_frequency = should_refresh_insight(insight, None, request=self.refresh_request) @@ -116,7 +121,9 @@ def test_insights_with_ranges_lower_than_7_days_can_be_refreshed_more_often(self @freeze_time("2012-01-14T03:21:34.000Z") def test_dashboard_filters_should_override_insight_filters_when_deciding_on_refresh_time(self): insight, _, dashboard_tile = _create_insight( - self.team, {"events": [{"id": "$pageview"}], "interval": "month"}, {"interval": "hour"} + self.team, + {"events": [{"id": "$pageview"}], "interval": "month"}, + {"interval": "hour"}, ) should_refresh_now, refresh_frequency = should_refresh_insight( diff --git a/posthog/caching/test/test_tolerant_zlib_compressor.py b/posthog/caching/test/test_tolerant_zlib_compressor.py index 
3f895f244b49e..acefa330fe228 100644 --- a/posthog/caching/test/test_tolerant_zlib_compressor.py +++ b/posthog/caching/test/test_tolerant_zlib_compressor.py @@ -15,8 +15,18 @@ class TestTolerantZlibCompressor(TestCase): @parameterized.expand( [ - ("test_when_disabled_compress_is_the_identity", False, uncompressed_bytes, uncompressed_bytes), - ("test_when_enabled_can_compress", True, uncompressed_bytes, compressed_bytes), + ( + "test_when_disabled_compress_is_the_identity", + False, + uncompressed_bytes, + uncompressed_bytes, + ), + ( + "test_when_enabled_can_compress", + True, + uncompressed_bytes, + compressed_bytes, + ), ( "test_when_enabled_does_not_compress_small_values", True, @@ -32,9 +42,24 @@ def test_the_zlib_compressor_compression(self, _, setting: bool, input: bytes, o @parameterized.expand( [ - ("test_when_disabled_decompress_is_the_identity", False, uncompressed_bytes, uncompressed_bytes), - ("test_when_enabled_can_decompress", True, compressed_bytes, uncompressed_bytes), - ("test_when_disabled_can_still_decompress", False, compressed_bytes, uncompressed_bytes), + ( + "test_when_disabled_decompress_is_the_identity", + False, + uncompressed_bytes, + uncompressed_bytes, + ), + ( + "test_when_enabled_can_decompress", + True, + compressed_bytes, + uncompressed_bytes, + ), + ( + "test_when_disabled_can_still_decompress", + False, + compressed_bytes, + uncompressed_bytes, + ), ] ) def test_the_zlib_compressor_decompression(self, _, setting: bool, input: bytes, output: bytes) -> None: diff --git a/posthog/caching/utils.py b/posthog/caching/utils.py index 45ff4ba640968..636fdbb19c53e 100644 --- a/posthog/caching/utils.py +++ b/posthog/caching/utils.py @@ -56,7 +56,10 @@ def active_teams() -> Set[int]: ) if not teams_by_recency: return set() - redis.zadd(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, {team: score for team, score in teams_by_recency}) + redis.zadd( + RECENTLY_ACCESSED_TEAMS_REDIS_KEY, + {team: score for team, score in teams_by_recency}, + ) 
redis.expire(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, IN_A_DAY) all_teams = teams_by_recency @@ -71,7 +74,10 @@ def stale_cache_invalidation_disabled(team: Team) -> bool: str(team.uuid), groups={"organization": str(team.organization.id)}, group_properties={ - "organization": {"id": str(team.organization.id), "created_at": team.organization.created_at} + "organization": { + "id": str(team.organization.id), + "created_at": team.organization.created_at, + } }, only_evaluate_locally=True, send_feature_flag_events=False, @@ -81,7 +87,9 @@ def stale_cache_invalidation_disabled(team: Team) -> bool: def is_stale_filter( - team: Team, filter: Filter | RetentionFilter | StickinessFilter | PathFilter, cached_result: Any + team: Team, + filter: Filter | RetentionFilter | StickinessFilter | PathFilter, + cached_result: Any, ) -> bool: interval = filter.period.lower() if isinstance(filter, RetentionFilter) else filter.interval return is_stale(team, filter.date_to, interval, cached_result) diff --git a/posthog/celery.py b/posthog/celery.py index fb9043f56467a..1eb5bb40db888 100644 --- a/posthog/celery.py +++ b/posthog/celery.py @@ -104,7 +104,10 @@ def on_worker_start(**kwargs) -> None: def add_periodic_task_with_expiry( - sender: Celery, schedule_seconds: int, task_signature: Signature, name: str | None = None + sender: Celery, + schedule_seconds: int, + task_signature: Signature, + name: str | None = None, ): """ If the workers get delayed in processing tasks, then tasks that fire every X seconds get queued multiple times @@ -125,7 +128,10 @@ def add_periodic_task_with_expiry( def setup_periodic_tasks(sender: Celery, **kwargs): # Monitoring tasks add_periodic_task_with_expiry( - sender, 60, monitoring_check_clickhouse_schema_drift.s(), "check clickhouse schema drift" + sender, + 60, + monitoring_check_clickhouse_schema_drift.s(), + "check clickhouse schema drift", ) if not settings.DEBUG: @@ -136,15 +142,22 @@ def setup_periodic_tasks(sender: Celery, **kwargs): # Update events table 
partitions twice a week sender.add_periodic_task( - crontab(day_of_week="mon,fri", hour="0", minute="0"), update_event_partitions.s() # check twice a week + crontab(day_of_week="mon,fri", hour="0", minute="0"), + update_event_partitions.s(), # check twice a week ) # Send all instance usage to the Billing service sender.add_periodic_task( - crontab(hour="0", minute="5"), send_org_usage_reports.s(), name="send instance usage report" + crontab(hour="0", minute="5"), + send_org_usage_reports.s(), + name="send instance usage report", ) # Update local usage info for rate limiting purposes - offset by 30 minutes to not clash with the above - sender.add_periodic_task(crontab(hour="*", minute="30"), update_quota_limiting.s(), name="update quota limiting") + sender.add_periodic_task( + crontab(hour="*", minute="30"), + update_quota_limiting.s(), + name="update quota limiting", + ) # PostHog Cloud cron jobs # NOTE: We can't use is_cloud here as some Django elements aren't loaded yet. We check in the task execution instead @@ -152,7 +165,11 @@ def setup_periodic_tasks(sender: Celery, **kwargs): sender.add_periodic_task(crontab(hour="4", minute="0"), verify_persons_data_in_sync.s()) # Every 30 minutes, send decide request counts to the main posthog instance - sender.add_periodic_task(crontab(minute="*/30"), calculate_decide_usage.s(), name="calculate decide usage") + sender.add_periodic_task( + crontab(minute="*/30"), + calculate_decide_usage.s(), + name="calculate decide usage", + ) # Reset master project data every Monday at Thursday at 5 AM UTC. Mon and Thu because doing this every day # would be too hard on ClickHouse, and those days ensure most users will have data at most 3 days old. 
@@ -166,7 +183,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs): sync_insight_cache_states_schedule = get_crontab(settings.SYNC_INSIGHT_CACHE_STATES_SCHEDULE) if sync_insight_cache_states_schedule: sender.add_periodic_task( - sync_insight_cache_states_schedule, sync_insight_cache_states_task.s(), name="sync insight cache states" + sync_insight_cache_states_schedule, + sync_insight_cache_states_task.s(), + name="sync insight cache states", ) add_periodic_task_with_expiry( @@ -226,7 +245,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs): name="PG table cache hit rate", ) sender.add_periodic_task( - crontab(minute="0", hour="*"), pg_plugin_server_query_timing.s(), name="PG plugin server query timing" + crontab(minute="0", hour="*"), + pg_plugin_server_query_timing.s(), + name="PG plugin server query timing", ) add_periodic_task_with_expiry( sender, @@ -244,7 +265,9 @@ def setup_periodic_tasks(sender: Celery, **kwargs): if clear_clickhouse_crontab := get_crontab(settings.CLEAR_CLICKHOUSE_REMOVED_DATA_SCHEDULE_CRON): sender.add_periodic_task( - clear_clickhouse_crontab, clickhouse_clear_removed_data.s(), name="clickhouse clear removed data" + clear_clickhouse_crontab, + clickhouse_clear_removed_data.s(), + name="clickhouse clear removed data", ) if clear_clickhouse_deleted_person_crontab := get_crontab(settings.CLEAR_CLICKHOUSE_DELETED_PERSON_SCHEDULE_CRON): @@ -256,17 +279,21 @@ def setup_periodic_tasks(sender: Celery, **kwargs): if settings.EE_AVAILABLE: sender.add_periodic_task( - crontab(hour="0", minute=str(randrange(0, 40))), clickhouse_send_license_usage.s() + crontab(hour="0", minute=str(randrange(0, 40))), + clickhouse_send_license_usage.s(), ) # every day at a random minute past midnight. 
Randomize to avoid overloading license.posthog.com sender.add_periodic_task( - crontab(hour="4", minute=str(randrange(0, 40))), clickhouse_send_license_usage.s() + crontab(hour="4", minute=str(randrange(0, 40))), + clickhouse_send_license_usage.s(), ) # again a few hours later just to make sure materialize_columns_crontab = get_crontab(settings.MATERIALIZE_COLUMNS_SCHEDULE_CRON) if materialize_columns_crontab: sender.add_periodic_task( - materialize_columns_crontab, clickhouse_materialize_columns.s(), name="clickhouse materialize columns" + materialize_columns_crontab, + clickhouse_materialize_columns.s(), + name="clickhouse materialize columns", ) sender.add_periodic_task( @@ -276,7 +303,10 @@ def setup_periodic_tasks(sender: Celery, **kwargs): ) sender.add_periodic_task(crontab(hour="*", minute="55"), schedule_all_subscriptions.s()) - sender.add_periodic_task(crontab(hour="2", minute=str(randrange(0, 40))), ee_persist_finished_recordings.s()) + sender.add_periodic_task( + crontab(hour="2", minute=str(randrange(0, 40))), + ee_persist_finished_recordings.s(), + ) sender.add_periodic_task( crontab(minute="0", hour="*"), @@ -303,7 +333,10 @@ def setup_periodic_tasks(sender: Celery, **kwargs): def pre_run_signal_handler(task_id, task, **kwargs): from statshog.defaults.django import statsd - from posthog.clickhouse.client.connection import Workload, set_default_clickhouse_workload_type + from posthog.clickhouse.client.connection import ( + Workload, + set_default_clickhouse_workload_type, + ) from posthog.clickhouse.query_tagging import tag_queries statsd.incr("celery_tasks_metrics.pre_run", tags={"name": task.name}) @@ -359,7 +392,15 @@ def enqueue_clickhouse_execute_with_progress( """ from posthog.client import execute_with_progress - execute_with_progress(team_id, query_id, query, args, settings, with_column_types, task_id=self.request.id) + execute_with_progress( + team_id, + query_id, + query, + args, + settings, + with_column_types, + task_id=self.request.id, + ) 
@app.task(ignore_result=True) @@ -425,7 +466,9 @@ def pg_plugin_server_query_timing(): if key == "query_type": continue statsd.gauge( - f"pg_plugin_server_query_{key}", value, tags={"query_type": row_dictionary["query_type"]} + f"pg_plugin_server_query_{key}", + value, + tags={"query_type": row_dictionary["query_type"]}, ) except: # if this doesn't work keep going @@ -457,7 +500,13 @@ def pg_row_count(): pass -CLICKHOUSE_TABLES = ["events", "person", "person_distinct_id2", "session_replay_events", "log_entries"] +CLICKHOUSE_TABLES = [ + "events", + "person", + "person_distinct_id2", + "session_replay_events", + "log_entries", +] if not is_cloud(): CLICKHOUSE_TABLES.append("session_recording_events") @@ -482,7 +531,11 @@ def clickhouse_lag(): ) query = QUERY.format(table=table) lag = sync_execute(query)[0][2] - statsd.gauge("posthog_celery_clickhouse__table_lag_seconds", lag, tags={"table": table}) + statsd.gauge( + "posthog_celery_clickhouse__table_lag_seconds", + lag, + tags={"table": table}, + ) lag_gauge.labels(table_name=table).set(lag) except: pass @@ -535,7 +588,12 @@ def ingestion_lag(): pass -KNOWN_CELERY_TASK_IDENTIFIERS = {"pluginJob", "runEveryHour", "runEveryMinute", "runEveryDay"} +KNOWN_CELERY_TASK_IDENTIFIERS = { + "pluginJob", + "runEveryHour", + "runEveryMinute", + "runEveryDay", +} @app.task(ignore_result=True) @@ -588,7 +646,11 @@ def graphile_worker_queue_size(): seen_task_identifier.add(task_identifier) waiting_jobs_gauge.labels(task_identifier=task_identifier).set(count) processing_lag_gauge.labels(task_identifier=task_identifier).set(time.time() - float(oldest)) - statsd.gauge("graphile_waiting_jobs", count, tags={"task_identifier": task_identifier}) + statsd.gauge( + "graphile_waiting_jobs", + count, + tags={"task_identifier": task_identifier}, + ) # The query will not return rows for empty queues, creating missing points. # Let's emit updates for known queues even if they are empty. 
@@ -618,7 +680,11 @@ def clickhouse_row_count(): query = QUERY.format(table=table) rows = sync_execute(query)[0][0] row_count_gauge.labels(table_name=table).set(rows) - statsd.gauge(f"posthog_celery_clickhouse_table_row_count", rows, tags={"table": table}) + statsd.gauge( + f"posthog_celery_clickhouse_table_row_count", + rows, + tags={"table": table}, + ) except: pass @@ -681,7 +747,11 @@ def clickhouse_part_count(): ) for table, parts in rows: parts_count_gauge.labels(table=table).set(parts) - statsd.gauge(f"posthog_celery_clickhouse_table_parts_count", parts, tags={"table": table}) + statsd.gauge( + f"posthog_celery_clickhouse_table_parts_count", + parts, + tags={"table": table}, + ) @app.task(ignore_result=True) @@ -710,7 +780,11 @@ def clickhouse_mutation_count(): ) for table, muts in rows: mutations_count_gauge.labels(table=table).set(muts) - statsd.gauge(f"posthog_celery_clickhouse_table_mutations_count", muts, tags={"table": table}) + statsd.gauge( + f"posthog_celery_clickhouse_table_mutations_count", + muts, + tags={"table": table}, + ) @app.task(ignore_result=True) @@ -739,7 +813,9 @@ def redis_celery_queue_depth(): try: with pushed_metrics_registry("redis_celery_queue_depth_registry") as registry: celery_task_queue_depth_gauge = Gauge( - "posthog_celery_queue_depth", "We use this to monitor the depth of the celery queue.", registry=registry + "posthog_celery_queue_depth", + "We use this to monitor the depth of the celery queue.", + registry=registry, ) llen = get_client().llen("celery") @@ -767,7 +843,9 @@ def clean_stale_partials(): @app.task(ignore_result=True) def monitoring_check_clickhouse_schema_drift(): - from posthog.tasks.check_clickhouse_schema_drift import check_clickhouse_schema_drift + from posthog.tasks.check_clickhouse_schema_drift import ( + check_clickhouse_schema_drift, + ) check_clickhouse_schema_drift() @@ -801,7 +879,11 @@ def update_cache_task(caching_state_id: UUID): @app.task(ignore_result=True) -def 
sync_insight_caching_state(team_id: int, insight_id: Optional[int] = None, dashboard_tile_id: Optional[int] = None): +def sync_insight_caching_state( + team_id: int, + insight_id: Optional[int] = None, + dashboard_tile_id: Optional[int] = None, +): from posthog.caching.insight_caching_state import sync_insight_caching_state sync_insight_caching_state(team_id, insight_id, dashboard_tile_id) @@ -851,7 +933,9 @@ def calculate_decide_usage() -> None: def find_flags_with_enriched_analytics(): from datetime import datetime, timedelta - from posthog.models.feature_flag.flag_analytics import find_flags_with_enriched_analytics + from posthog.models.feature_flag.flag_analytics import ( + find_flags_with_enriched_analytics, + ) end = datetime.now() begin = end - timedelta(hours=12) @@ -869,7 +953,9 @@ def demo_reset_master_team(): @app.task(ignore_result=True) def sync_all_organization_available_features(): - from posthog.tasks.sync_all_organization_available_features import sync_all_organization_available_features + from posthog.tasks.sync_all_organization_available_features import ( + sync_all_organization_available_features, + ) sync_all_organization_available_features() @@ -883,7 +969,9 @@ def check_async_migration_health(): @app.task(ignore_result=True) def verify_persons_data_in_sync(): - from posthog.tasks.verify_persons_data_in_sync import verify_persons_data_in_sync as verify + from posthog.tasks.verify_persons_data_in_sync import ( + verify_persons_data_in_sync as verify, + ) if not is_cloud(): return @@ -905,7 +993,9 @@ def recompute_materialized_columns_enabled() -> bool: def clickhouse_materialize_columns(): if recompute_materialized_columns_enabled(): try: - from ee.clickhouse.materialized_columns.analyze import materialize_properties_task + from ee.clickhouse.materialized_columns.analyze import ( + materialize_properties_task, + ) except ImportError: pass else: @@ -943,7 +1033,9 @@ def update_quota_limiting(): @app.task(ignore_result=True) def 
schedule_all_subscriptions(): try: - from ee.tasks.subscriptions import schedule_all_subscriptions as _schedule_all_subscriptions + from ee.tasks.subscriptions import ( + schedule_all_subscriptions as _schedule_all_subscriptions, + ) except ImportError: pass else: diff --git a/posthog/clickhouse/client/connection.py b/posthog/clickhouse/client/connection.py index 8cf665d857c60..fbbfd08086822 100644 --- a/posthog/clickhouse/client/connection.py +++ b/posthog/clickhouse/client/connection.py @@ -30,7 +30,10 @@ def get_pool(workload: Workload, team_id=None, readonly=False): # Note that `readonly` does nothing if the relevant vars are not set! if readonly and settings.READONLY_CLICKHOUSE_USER is not None and settings.READONLY_CLICKHOUSE_PASSWORD: - return make_ch_pool(user=settings.READONLY_CLICKHOUSE_USER, password=settings.READONLY_CLICKHOUSE_PASSWORD) + return make_ch_pool( + user=settings.READONLY_CLICKHOUSE_USER, + password=settings.READONLY_CLICKHOUSE_PASSWORD, + ) if ( workload == Workload.OFFLINE or workload == Workload.DEFAULT and _default_workload == Workload.OFFLINE diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py index 60cad345fcaa7..5f039c78c19f9 100644 --- a/posthog/clickhouse/client/execute.py +++ b/posthog/clickhouse/client/execute.py @@ -40,7 +40,10 @@ @lru_cache(maxsize=1) def default_settings() -> Dict: - return {"join_algorithm": "direct,parallel_hash", "distributed_replica_max_ignored_errors": 1000} + return { + "join_algorithm": "direct,parallel_hash", + "distributed_replica_max_ignored_errors": 1000, + } @lru_cache(maxsize=1) @@ -81,7 +84,7 @@ def sync_execute( from posthog.test.base import flush_persons_and_events flush_persons_and_events() - except ModuleNotFoundError: # when we run plugin server tests it tries to run above, ignore + except (ModuleNotFoundError): # when we run plugin server tests it tries to run above, ignore pass with get_pool(workload, team_id, readonly).get_client() as client: @@ -91,7 
+94,10 @@ def sync_execute( query_id = validated_client_query_id() core_settings = {**default_settings(), **(settings or {})} tags["query_settings"] = core_settings - settings = {**core_settings, "log_comment": json.dumps(tags, separators=(",", ":"))} + settings = { + **core_settings, + "log_comment": json.dumps(tags, separators=(",", ":")), + } try: result = client.execute( prepared_sql, @@ -102,7 +108,10 @@ def sync_execute( ) except Exception as err: err = wrap_query_error(err) - statsd.incr("clickhouse_sync_execution_failure", tags={"failed": True, "reason": type(err).__name__}) + statsd.incr( + "clickhouse_sync_execution_failure", + tags={"failed": True, "reason": type(err).__name__}, + ) raise err finally: @@ -147,7 +156,12 @@ def query_with_columns( @patchable -def _prepare_query(client: SyncClient, query: str, args: QueryArgs, workload: Workload = Workload.DEFAULT): +def _prepare_query( + client: SyncClient, + query: str, + args: QueryArgs, + workload: Workload = Workload.DEFAULT, +): """ Given a string query with placeholders we do one of two things: @@ -219,7 +233,9 @@ def format_sql(rendered_sql, colorize=True): import pygments.lexers return pygments.highlight( - formatted_sql, pygments.lexers.get_lexer_by_name("sql"), pygments.formatters.TerminalFormatter() + formatted_sql, + pygments.lexers.get_lexer_by_name("sql"), + pygments.formatters.TerminalFormatter(), ) except: pass diff --git a/posthog/clickhouse/client/execute_async.py b/posthog/clickhouse/client/execute_async.py index 89de42427f568..3bb28c3f20075 100644 --- a/posthog/clickhouse/client/execute_async.py +++ b/posthog/clickhouse/client/execute_async.py @@ -49,7 +49,14 @@ def generate_redis_results_key(query_id): def execute_with_progress( - team_id, query_id, query, args=None, settings=None, with_column_types=False, update_freq=0.2, task_id=None + team_id, + query_id, + query, + args=None, + settings=None, + with_column_types=False, + update_freq=0.2, + task_id=None, ): """ Kick off query with 
progress reporting @@ -81,7 +88,10 @@ def execute_with_progress( try: progress = ch_client.execute_with_progress( - prepared_sql, params=prepared_args, settings=settings, with_column_types=with_column_types + prepared_sql, + params=prepared_args, + settings=settings, + with_column_types=with_column_types, ) for num_rows, total_rows in progress: query_status = QueryStatus( @@ -145,7 +155,14 @@ def execute_with_progress( def enqueue_execute_with_progress( - team_id, query, args=None, settings=None, with_column_types=False, bypass_celery=False, query_id=None, force=False + team_id, + query, + args=None, + settings=None, + with_column_types=False, + bypass_celery=False, + query_id=None, + force=False, ): if not query_id: query_id = _query_hash(query, team_id, args) diff --git a/posthog/clickhouse/client/test/test_connection.py b/posthog/clickhouse/client/test/test_connection.py index d40e544bf16fc..e05a87b84e60c 100644 --- a/posthog/clickhouse/client/test/test_connection.py +++ b/posthog/clickhouse/client/test/test_connection.py @@ -1,6 +1,11 @@ import pytest -from posthog.clickhouse.client.connection import Workload, get_pool, make_ch_pool, set_default_clickhouse_workload_type +from posthog.clickhouse.client.connection import ( + Workload, + get_pool, + make_ch_pool, + set_default_clickhouse_workload_type, +) def test_connection_pool_creation_without_offline_cluster(settings): diff --git a/posthog/clickhouse/dead_letter_queue.py b/posthog/clickhouse/dead_letter_queue.py index 53896bbfa6869..298d99e4ed88b 100644 --- a/posthog/clickhouse/dead_letter_queue.py +++ b/posthog/clickhouse/dead_letter_queue.py @@ -86,7 +86,9 @@ _offset FROM {database}.kafka_{table_name} """.format( - table_name=DEAD_LETTER_QUEUE_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE + table_name=DEAD_LETTER_QUEUE_TABLE, + cluster=CLICKHOUSE_CLUSTER, + database=CLICKHOUSE_DATABASE, ) diff --git a/posthog/clickhouse/log_entries.py b/posthog/clickhouse/log_entries.py index 
017ee408aea44..471ca18eac7fb 100644 --- a/posthog/clickhouse/log_entries.py +++ b/posthog/clickhouse/log_entries.py @@ -69,7 +69,9 @@ _offset FROM {database}.kafka_{table_name} """.format( - table_name=LOG_ENTRIES_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE + table_name=LOG_ENTRIES_TABLE, + cluster=CLICKHOUSE_CLUSTER, + database=CLICKHOUSE_DATABASE, ) diff --git a/posthog/clickhouse/migrations/0003_person.py b/posthog/clickhouse/migrations/0003_person.py index d780fd855ec8b..ccdcf428de43b 100644 --- a/posthog/clickhouse/migrations/0003_person.py +++ b/posthog/clickhouse/migrations/0003_person.py @@ -1,5 +1,9 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.models.person.sql import COMMENT_DISTINCT_ID_COLUMN_SQL, PERSONS_DISTINCT_ID_TABLE_SQL, PERSONS_TABLE_SQL +from posthog.models.person.sql import ( + COMMENT_DISTINCT_ID_COLUMN_SQL, + PERSONS_DISTINCT_ID_TABLE_SQL, + PERSONS_TABLE_SQL, +) operations = [ run_sql_with_exceptions(PERSONS_TABLE_SQL()), diff --git a/posthog/clickhouse/migrations/0004_kafka.py b/posthog/clickhouse/migrations/0004_kafka.py index 5243e206bd098..857398c2a3cc7 100644 --- a/posthog/clickhouse/migrations/0004_kafka.py +++ b/posthog/clickhouse/migrations/0004_kafka.py @@ -1,5 +1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.models.event.sql import DISTRIBUTED_EVENTS_TABLE_SQL, WRITABLE_EVENTS_TABLE_SQL +from posthog.models.event.sql import ( + DISTRIBUTED_EVENTS_TABLE_SQL, + WRITABLE_EVENTS_TABLE_SQL, +) from posthog.models.person.sql import ( KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL, KAFKA_PERSONS_TABLE_SQL, diff --git a/posthog/clickhouse/migrations/0012_person_id_deleted_column.py b/posthog/clickhouse/migrations/0012_person_id_deleted_column.py index ef324ce2417f9..40a3a0a0ef4f6 100644 --- a/posthog/clickhouse/migrations/0012_person_id_deleted_column.py +++ b/posthog/clickhouse/migrations/0012_person_id_deleted_column.py @@ -1,5 
+1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.models.person.sql import KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL, PERSONS_DISTINCT_ID_TABLE_MV_SQL +from posthog.models.person.sql import ( + KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL, + PERSONS_DISTINCT_ID_TABLE_MV_SQL, +) from posthog.settings import CLICKHOUSE_CLUSTER operations = [ diff --git a/posthog/clickhouse/migrations/0018_group_analytics_schema.py b/posthog/clickhouse/migrations/0018_group_analytics_schema.py index 69e923f8b7989..05cf74d0c24ae 100644 --- a/posthog/clickhouse/migrations/0018_group_analytics_schema.py +++ b/posthog/clickhouse/migrations/0018_group_analytics_schema.py @@ -1,5 +1,9 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.models.group.sql import GROUPS_TABLE_MV_SQL, GROUPS_TABLE_SQL, KAFKA_GROUPS_TABLE_SQL +from posthog.models.group.sql import ( + GROUPS_TABLE_MV_SQL, + GROUPS_TABLE_SQL, + KAFKA_GROUPS_TABLE_SQL, +) operations = [ run_sql_with_exceptions(GROUPS_TABLE_SQL()), diff --git a/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py b/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py index f34752a660a28..cce6212290056 100644 --- a/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py +++ b/posthog/clickhouse/migrations/0023_dead_letter_queue_tags.py @@ -1,5 +1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.clickhouse.dead_letter_queue import DEAD_LETTER_QUEUE_TABLE_MV_SQL, KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL +from posthog.clickhouse.dead_letter_queue import ( + DEAD_LETTER_QUEUE_TABLE_MV_SQL, + KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL, +) from posthog.settings import CLICKHOUSE_CLUSTER operations = [ diff --git a/posthog/clickhouse/migrations/0025_json_events.py b/posthog/clickhouse/migrations/0025_json_events.py index 1dd452dff732e..fd8056b227123 100644 --- a/posthog/clickhouse/migrations/0025_json_events.py +++ 
b/posthog/clickhouse/migrations/0025_json_events.py @@ -1,5 +1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL +from posthog.models.event.sql import ( + EVENTS_TABLE_JSON_MV_SQL, + KAFKA_EVENTS_TABLE_JSON_SQL, +) operations = [ run_sql_with_exceptions(KAFKA_EVENTS_TABLE_JSON_SQL()), diff --git a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py index d2fc6a7d4bac9..b27c8ad29f59a 100644 --- a/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py +++ b/posthog/clickhouse/migrations/0026_fix_materialized_window_and_session_ids.py @@ -1,6 +1,9 @@ from infi.clickhouse_orm import migrations -from posthog.clickhouse.materialized_columns import get_materialized_columns, materialize +from posthog.clickhouse.materialized_columns import ( + get_materialized_columns, + materialize, +) from posthog.client import sync_execute from posthog.settings import CLICKHOUSE_CLUSTER @@ -38,7 +41,6 @@ def ensure_only_new_column_exists(database, table_name, old_column_name, new_col def materialize_session_and_window_id(database): - properties = ["$session_id", "$window_id"] for property_name in properties: materialized_columns = get_materialized_columns("events", use_cache=False) diff --git a/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py b/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py index 500d2e1184f4b..534a2d6dbf01c 100644 --- a/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py +++ b/posthog/clickhouse/migrations/0027_persons_and_groups_on_events.py @@ -2,7 +2,10 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions from posthog.client import sync_execute -from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL 
+from posthog.models.event.sql import ( + EVENTS_TABLE_JSON_MV_SQL, + KAFKA_EVENTS_TABLE_JSON_SQL, +) from posthog.settings import CLICKHOUSE_CLUSTER ADD_COLUMNS_BASE_SQL = """ diff --git a/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py b/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py index fd8676e47b74e..ff7746fe8e326 100644 --- a/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py +++ b/posthog/clickhouse/migrations/0028_dead_letter_queue_settings.py @@ -1,5 +1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.clickhouse.dead_letter_queue import DEAD_LETTER_QUEUE_TABLE_MV_SQL, KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL +from posthog.clickhouse.dead_letter_queue import ( + DEAD_LETTER_QUEUE_TABLE_MV_SQL, + KAFKA_DEAD_LETTER_QUEUE_TABLE_SQL, +) from posthog.settings.data_stores import CLICKHOUSE_CLUSTER operations = [ diff --git a/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py b/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py index a68f39422ab65..254ff78a531ff 100644 --- a/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py +++ b/posthog/clickhouse/migrations/0030_created_at_persons_and_groups_on_events.py @@ -2,7 +2,10 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions from posthog.client import sync_execute -from posthog.models.event.sql import EVENTS_TABLE_JSON_MV_SQL, KAFKA_EVENTS_TABLE_JSON_SQL +from posthog.models.event.sql import ( + EVENTS_TABLE_JSON_MV_SQL, + KAFKA_EVENTS_TABLE_JSON_SQL, +) from posthog.settings import CLICKHOUSE_CLUSTER ADD_COLUMNS_BASE_SQL = """ diff --git a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py index be819a0111a01..d6705db02eb33 100644 --- 
a/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py +++ b/posthog/clickhouse/migrations/0036_session_recording_events_materialized_columns.py @@ -1,12 +1,13 @@ from infi.clickhouse_orm import migrations from posthog.client import sync_execute -from posthog.session_recordings.sql.session_recording_event_sql import MATERIALIZED_COLUMNS +from posthog.session_recordings.sql.session_recording_event_sql import ( + MATERIALIZED_COLUMNS, +) from posthog.settings import CLICKHOUSE_CLUSTER def create_events_summary_mat_columns(database): - columns_to_add = [ "events_summary", "click_count", diff --git a/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py b/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py index afc8e898f1327..1a588a1092474 100644 --- a/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py +++ b/posthog/clickhouse/migrations/0042_kafka_partitions_stats.py @@ -1,5 +1,8 @@ from posthog.clickhouse.client.migration_tools import run_sql_with_exceptions -from posthog.kafka_client.topics import KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, KAFKA_SESSION_RECORDING_EVENTS +from posthog.kafka_client.topics import ( + KAFKA_EVENTS_PLUGIN_INGESTION_OVERFLOW, + KAFKA_SESSION_RECORDING_EVENTS, +) from posthog.models.kafka_partition_stats.sql import ( CREATE_PARTITION_STATISTICS_KAFKA_TABLE, CREATE_PARTITION_STATISTICS_MV, diff --git a/posthog/clickhouse/plugin_log_entries.py b/posthog/clickhouse/plugin_log_entries.py index cb03e34eb3471..1f4f7c70d7146 100644 --- a/posthog/clickhouse/plugin_log_entries.py +++ b/posthog/clickhouse/plugin_log_entries.py @@ -61,7 +61,9 @@ _offset FROM {database}.kafka_{table_name} """.format( - table_name=PLUGIN_LOG_ENTRIES_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE + table_name=PLUGIN_LOG_ENTRIES_TABLE, + cluster=CLICKHOUSE_CLUSTER, + database=CLICKHOUSE_DATABASE, ) diff --git a/posthog/clickhouse/system_status.py b/posthog/clickhouse/system_status.py index 
2317e41c39e1d..bd9bd22f427c6 100644 --- a/posthog/clickhouse/system_status.py +++ b/posthog/clickhouse/system_status.py @@ -6,10 +6,17 @@ from dateutil.relativedelta import relativedelta from django.utils import timezone -from posthog.api.dead_letter_queue import get_dead_letter_queue_events_last_24h, get_dead_letter_queue_size +from posthog.api.dead_letter_queue import ( + get_dead_letter_queue_events_last_24h, + get_dead_letter_queue_size, +) from posthog.cache_utils import cache_for from posthog.client import query_with_columns, sync_execute -from posthog.models.event.util import get_event_count, get_event_count_for_last_month, get_event_count_month_to_date +from posthog.models.event.util import ( + get_event_count, + get_event_count_for_last_month, + get_event_count_month_to_date, +) from posthog.session_recordings.models.system_status_queries import ( get_recording_status_month_to_date, ) @@ -25,12 +32,20 @@ def system_status() -> Generator[SystemStatusRow, None, None]: alive = is_alive() - yield {"key": "clickhouse_alive", "metric": "Clickhouse database alive", "value": alive} + yield { + "key": "clickhouse_alive", + "metric": "Clickhouse database alive", + "value": alive, + } if not alive: return - yield {"key": "clickhouse_event_count", "metric": "Events in ClickHouse", "value": get_event_count()} + yield { + "key": "clickhouse_event_count", + "metric": "Events in ClickHouse", + "value": get_event_count(), + } yield { "key": "clickhouse_event_count_last_month", "metric": "Events recorded last month", @@ -67,8 +82,16 @@ def system_status() -> Generator[SystemStatusRow, None, None]: for index, (total_space, free_space) in enumerate(disk_status): metric = "Clickhouse disk" if len(disk_status) == 1 else f"Clickhouse disk {index}" - yield {"key": f"clickhouse_disk_{index}_free_space", "metric": f"{metric} free space", "value": free_space} - yield {"key": f"clickhouse_disk_{index}_total_space", "metric": f"{metric} total space", "value": total_space} + yield { + 
"key": f"clickhouse_disk_{index}_free_space", + "metric": f"{metric} free space", + "value": free_space, + } + yield { + "key": f"clickhouse_disk_{index}_total_space", + "metric": f"{metric} total space", + "value": total_space, + } table_sizes = sync_execute( """ @@ -97,7 +120,10 @@ def system_status() -> Generator[SystemStatusRow, None, None]: "key": "clickhouse_system_metrics", "metric": "Clickhouse system metrics", "value": "", - "subrows": {"columns": ["Metric", "Value", "Description"], "rows": list(sorted(system_metrics))}, + "subrows": { + "columns": ["Metric", "Value", "Description"], + "rows": list(sorted(system_metrics)), + }, } # This timestamp is a naive timestamp (does not include a timezone) @@ -121,9 +147,16 @@ def system_status() -> Generator[SystemStatusRow, None, None]: dead_letter_queue_size = get_dead_letter_queue_size() - yield {"key": "dead_letter_queue_size", "metric": "Dead letter queue size", "value": dead_letter_queue_size} + yield { + "key": "dead_letter_queue_size", + "metric": "Dead letter queue size", + "value": dead_letter_queue_size, + } - dead_letter_queue_events_high, dead_letter_queue_events_last_day = dead_letter_queue_ratio() + ( + dead_letter_queue_events_high, + dead_letter_queue_events_last_day, + ) = dead_letter_queue_ratio() yield { "key": "dead_letter_queue_events_last_day", diff --git a/posthog/clickhouse/test/test_person_overrides.py b/posthog/clickhouse/test/test_person_overrides.py index f0d33c7d617f4..dc8bc2b17c503 100644 --- a/posthog/clickhouse/test/test_person_overrides.py +++ b/posthog/clickhouse/test/test_person_overrides.py @@ -81,7 +81,14 @@ def test_can_insert_person_overrides(): assert results != [] [result] = results created_at, *the_rest = result - assert the_rest == [1, old_person_id, override_person_id, oldest_event, merged_at, 2] + assert the_rest == [ + 1, + old_person_id, + override_person_id, + oldest_event, + merged_at, + 2, + ] assert created_at > datetime.now(tz=ZoneInfo("UTC")) - 
timedelta(seconds=10) finally: producer.close() @@ -124,7 +131,8 @@ def test_person_overrides_dict(): sync_execute("INSERT INTO person_overrides (*) VALUES", [values]) sync_execute("SYSTEM RELOAD DICTIONARY person_overrides_dict") results = sync_execute( - "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", values + "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", + values, ) assert len(results) == 1 @@ -136,7 +144,8 @@ def test_person_overrides_dict(): sync_execute("INSERT INTO person_overrides (*) VALUES", [values]) sync_execute("SYSTEM RELOAD DICTIONARY person_overrides_dict") new_results = sync_execute( - "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", values + "SELECT dictGet(person_overrides_dict, 'override_person_id', (%(team_id)s, %(old_person_id)s))", + values, ) assert len(new_results) == 1 diff --git a/posthog/conftest.py b/posthog/conftest.py index 06e7e256aed79..2b819ff9390ad 100644 --- a/posthog/conftest.py +++ b/posthog/conftest.py @@ -11,7 +11,11 @@ def create_clickhouse_tables(num_tables: int): # Create clickhouse tables to default before running test # Mostly so that test runs locally work correctly - from posthog.clickhouse.schema import CREATE_DISTRIBUTED_TABLE_QUERIES, CREATE_MERGETREE_TABLE_QUERIES, build_query + from posthog.clickhouse.schema import ( + CREATE_DISTRIBUTED_TABLE_QUERIES, + CREATE_MERGETREE_TABLE_QUERIES, + build_query, + ) # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY! CREATE_TABLE_QUERIES: Tuple[Any, ...] 
= CREATE_MERGETREE_TABLE_QUERIES + CREATE_DISTRIBUTED_TABLE_QUERIES @@ -27,8 +31,12 @@ def create_clickhouse_tables(num_tables: int): def reset_clickhouse_tables(): # Truncate clickhouse tables to default before running test # Mostly so that test runs locally work correctly - from posthog.clickhouse.dead_letter_queue import TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL - from posthog.clickhouse.plugin_log_entries import TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL + from posthog.clickhouse.dead_letter_queue import ( + TRUNCATE_DEAD_LETTER_QUEUE_TABLE_SQL, + ) + from posthog.clickhouse.plugin_log_entries import ( + TRUNCATE_PLUGIN_LOG_ENTRIES_TABLE_SQL, + ) from posthog.models.app_metrics.sql import TRUNCATE_APP_METRICS_TABLE_SQL from posthog.models.cohort.sql import TRUNCATE_COHORTPEOPLE_TABLE_SQL from posthog.models.event.sql import TRUNCATE_EVENTS_TABLE_SQL @@ -40,7 +48,9 @@ def reset_clickhouse_tables(): TRUNCATE_PERSON_STATIC_COHORT_TABLE_SQL, TRUNCATE_PERSON_TABLE_SQL, ) - from posthog.session_recordings.sql.session_recording_event_sql import TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL + from posthog.session_recordings.sql.session_recording_event_sql import ( + TRUNCATE_SESSION_RECORDING_EVENTS_TABLE_SQL, + ) # REMEMBER TO ADD ANY NEW CLICKHOUSE TABLES TO THIS ARRAY! 
TABLES_TO_CREATE_DROP = [ @@ -80,7 +90,8 @@ def django_db_setup(django_db_setup, django_db_keepdb): database.create_database() # Create database if it doesn't exist table_count = sync_execute( - "SELECT count() FROM system.tables WHERE database = %(database)s", {"database": settings.CLICKHOUSE_DATABASE} + "SELECT count() FROM system.tables WHERE database = %(database)s", + {"database": settings.CLICKHOUSE_DATABASE}, )[0][0] create_clickhouse_tables(table_count) diff --git a/posthog/constants.py b/posthog/constants.py index 3beb8ca12b3b9..ecfeb03e1259f 100644 --- a/posthog/constants.py +++ b/posthog/constants.py @@ -55,7 +55,13 @@ class AvailableFeature(str, Enum): TRENDS_BOLD_NUMBER = "BoldNumber" # Sync with frontend NON_TIME_SERIES_DISPLAY_TYPES -NON_TIME_SERIES_DISPLAY_TYPES = [TRENDS_TABLE, TRENDS_PIE, TRENDS_BAR_VALUE, TRENDS_WORLD_MAP, TRENDS_BOLD_NUMBER] +NON_TIME_SERIES_DISPLAY_TYPES = [ + TRENDS_TABLE, + TRENDS_PIE, + TRENDS_BAR_VALUE, + TRENDS_WORLD_MAP, + TRENDS_BOLD_NUMBER, +] # Sync with frontend NON_BREAKDOWN_DISPLAY_TYPES NON_BREAKDOWN_DISPLAY_TYPES = [TRENDS_BOLD_NUMBER] diff --git a/posthog/demo/legacy/app_data_generator.py b/posthog/demo/legacy/app_data_generator.py index 51a12e3d486ff..56f7c3ebcecc2 100644 --- a/posthog/demo/legacy/app_data_generator.py +++ b/posthog/demo/legacy/app_data_generator.py @@ -50,7 +50,12 @@ def create_actions_dashboards(self): "order": 0, "type": TREND_FILTER_TYPE_ACTIONS, }, - {"id": rated_app_action.id, "name": "Rated App", "order": 1, "type": TREND_FILTER_TYPE_ACTIONS}, + { + "id": rated_app_action.id, + "name": "Rated App", + "order": 1, + "type": TREND_FILTER_TYPE_ACTIONS, + }, { "id": rated_app_action.id, "name": "Rated App", @@ -68,8 +73,16 @@ def create_actions_dashboards(self): def populate_person_events(self, person: Person, distinct_id: str, _index: int): start_day = random.randint(1, self.n_days) - self.add_event(event="$pageview", distinct_id=distinct_id, timestamp=now() - relativedelta(days=start_day)) - 
self.add_event(event="installed_app", distinct_id=distinct_id, timestamp=now() - relativedelta(days=start_day)) + self.add_event( + event="$pageview", + distinct_id=distinct_id, + timestamp=now() - relativedelta(days=start_day), + ) + self.add_event( + event="installed_app", + distinct_id=distinct_id, + timestamp=now() - relativedelta(days=start_day), + ) if random.randint(0, 10) <= 9: self.add_event( diff --git a/posthog/demo/legacy/data_generator.py b/posthog/demo/legacy/data_generator.py index 65bdd350acc88..ccc9f163e6c3c 100644 --- a/posthog/demo/legacy/data_generator.py +++ b/posthog/demo/legacy/data_generator.py @@ -3,7 +3,9 @@ from posthog.models import Person, PersonDistinctId, Team from posthog.models.utils import UUIDT -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) class DataGenerator: diff --git a/posthog/demo/legacy/revenue_data_generator.py b/posthog/demo/legacy/revenue_data_generator.py index 641cd30f5250e..2fa4901389eb0 100644 --- a/posthog/demo/legacy/revenue_data_generator.py +++ b/posthog/demo/legacy/revenue_data_generator.py @@ -29,7 +29,9 @@ def create_missing_events_and_properties(self): def populate_person_events(self, person: Person, distinct_id: str, index: int): if random.randint(0, 10) <= 4: self.add_event( - event="entered_free_trial", distinct_id=distinct_id, timestamp=now() - relativedelta(days=345) + event="entered_free_trial", + distinct_id=distinct_id, + timestamp=now() - relativedelta(days=345), ) self.add_event( @@ -68,7 +70,14 @@ def create_actions_dashboards(self): team=self.team, name="Entered Free Trial -> Purchase (Premium)", filters={ - "events": [{"id": "$pageview", "name": "Pageview", "order": 0, "type": TREND_FILTER_TYPE_ACTIONS}], + "events": [ + { + "id": "$pageview", + "name": "Pageview", + "order": 0, + "type": TREND_FILTER_TYPE_ACTIONS, + } + ], "actions": [ { "id": 
purchase_action.id, diff --git a/posthog/demo/legacy/web_data_generator.py b/posthog/demo/legacy/web_data_generator.py index e74ddc53bfe99..aa0836d3db732 100644 --- a/posthog/demo/legacy/web_data_generator.py +++ b/posthog/demo/legacy/web_data_generator.py @@ -7,7 +7,15 @@ from django.utils.timezone import now from posthog.constants import TREND_FILTER_TYPE_ACTIONS -from posthog.models import Action, ActionStep, Dashboard, DashboardTile, Insight, Person, PropertyDefinition +from posthog.models import ( + Action, + ActionStep, + Dashboard, + DashboardTile, + Insight, + Person, + PropertyDefinition, +) from posthog.models.filters.mixins.utils import cached_property from posthog.models.utils import UUIDT from posthog.utils import get_absolute_path @@ -27,7 +35,12 @@ def create_missing_events_and_properties(self): def create_actions_dashboards(self): homepage = Action.objects.create(team=self.team, name="Hogflix homepage view") - ActionStep.objects.create(action=homepage, event="$pageview", url="http://hogflix.com", url_matching="exact") + ActionStep.objects.create( + action=homepage, + event="$pageview", + url="http://hogflix.com", + url_matching="exact", + ) user_signed_up = Action.objects.create(team=self.team, name="Hogflix signed up") ActionStep.objects.create( @@ -54,14 +67,24 @@ def create_actions_dashboards(self): description="Shows a conversion funnel from sign up to watching a movie.", filters={ "actions": [ - {"id": homepage.id, "name": "Hogflix homepage view", "order": 0, "type": TREND_FILTER_TYPE_ACTIONS}, + { + "id": homepage.id, + "name": "Hogflix homepage view", + "order": 0, + "type": TREND_FILTER_TYPE_ACTIONS, + }, { "id": user_signed_up.id, "name": "Hogflix signed up", "order": 1, "type": TREND_FILTER_TYPE_ACTIONS, }, - {"id": user_paid.id, "name": "Hogflix paid", "order": 2, "type": TREND_FILTER_TYPE_ACTIONS}, + { + "id": user_paid.id, + "name": "Hogflix paid", + "order": 2, + "type": TREND_FILTER_TYPE_ACTIONS, + }, ], "insight": "FUNNELS", }, @@ 
-77,7 +100,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int): event="$pageview", distinct_id=distinct_id, timestamp=now() - relativedelta(days=start_day), - properties={"$current_url": "http://hogflix.com", "$browser": browser, "$lib": "web"}, + properties={ + "$current_url": "http://hogflix.com", + "$browser": browser, + "$lib": "web", + }, ) self.add_event( @@ -107,7 +134,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int): self.add_event( event="$pageview", distinct_id=distinct_id, - properties={"$current_url": "http://hogflix.com/2", "$browser": browser, "$lib": "web"}, + properties={ + "$current_url": "http://hogflix.com/2", + "$browser": browser, + "$lib": "web", + }, timestamp=now() - relativedelta(days=start_day) + relativedelta(seconds=30), ) if index % 5 == 0: @@ -131,7 +162,11 @@ def populate_person_events(self, person: Person, distinct_id: str, index: int): self.add_event( event="$pageview", distinct_id=distinct_id, - properties={"$current_url": "http://hogflix.com/3", "$browser": browser, "$lib": "web"}, + properties={ + "$current_url": "http://hogflix.com/3", + "$browser": browser, + "$lib": "web", + }, timestamp=now() - relativedelta(days=start_day) + relativedelta(seconds=60), ) diff --git a/posthog/demo/matrix/manager.py b/posthog/demo/matrix/manager.py index 8b13bd78c2b24..c174b2c782bc9 100644 --- a/posthog/demo/matrix/manager.py +++ b/posthog/demo/matrix/manager.py @@ -66,7 +66,12 @@ def ensure_account_and_save( with transaction.atomic(): organization = Organization.objects.create(**organization_kwargs) new_user = User.objects.create_and_join( - organization, email, password, first_name, OrganizationMembership.Level.ADMIN, is_staff=is_staff + organization, + email, + password, + first_name, + OrganizationMembership.Level.ADMIN, + is_staff=is_staff, ) team = self.create_team(organization) self.run_on_team(team, new_user) @@ -99,7 +104,11 @@ def reset_master(self): @staticmethod 
def create_team(organization: Organization, **kwargs) -> Team: team = Team.objects.create( - organization=organization, ingested_event=True, completed_snippet_onboarding=True, is_demo=True, **kwargs + organization=organization, + ingested_event=True, + completed_snippet_onboarding=True, + is_demo=True, + **kwargs, ) return team @@ -132,11 +141,19 @@ def _save_analytics_data(self, data_team: Team): for group_type_index, (group_type, groups) in enumerate(self.matrix.groups.items()): group_type_index += self.matrix.group_type_index_offset # Adjust bulk_group_type_mappings.append( - GroupTypeMapping(team=data_team, group_type_index=group_type_index, group_type=group_type) + GroupTypeMapping( + team=data_team, + group_type_index=group_type_index, + group_type=group_type, + ) ) for group_key, group in groups.items(): self._save_sim_group( - data_team, cast(Literal[0, 1, 2, 3, 4], group_type_index), group_key, group, self.matrix.now + data_team, + cast(Literal[0, 1, 2, 3, 4], group_type_index), + group_key, + group, + self.matrix.now, ) try: GroupTypeMapping.objects.bulk_create(bulk_group_type_mappings) @@ -164,16 +181,28 @@ def _create_master_team(cls) -> Team: @classmethod def _erase_master_team_data(cls): AsyncEventDeletion().process( - [AsyncDeletion(team_id=cls.MASTER_TEAM_ID, key=cls.MASTER_TEAM_ID, deletion_type=DeletionType.Team)] + [ + AsyncDeletion( + team_id=cls.MASTER_TEAM_ID, + key=cls.MASTER_TEAM_ID, + deletion_type=DeletionType.Team, + ) + ] ) GroupTypeMapping.objects.filter(team_id=cls.MASTER_TEAM_ID).delete() def _copy_analytics_data_from_master_team(self, target_team: Team): from posthog.models.event.sql import COPY_EVENTS_BETWEEN_TEAMS from posthog.models.group.sql import COPY_GROUPS_BETWEEN_TEAMS - from posthog.models.person.sql import COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, COPY_PERSONS_BETWEEN_TEAMS + from posthog.models.person.sql import ( + COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, + COPY_PERSONS_BETWEEN_TEAMS, + ) - copy_params = 
{"source_team_id": self.MASTER_TEAM_ID, "target_team_id": target_team.pk} + copy_params = { + "source_team_id": self.MASTER_TEAM_ID, + "target_team_id": target_team.pk, + } sync_execute(COPY_PERSONS_BETWEEN_TEAMS, copy_params) sync_execute(COPY_PERSON_DISTINCT_ID2S_BETWEEN_TEAMS, copy_params) sync_execute(COPY_EVENTS_BETWEEN_TEAMS, copy_params) @@ -191,7 +220,10 @@ def _copy_analytics_data_from_master_team(self, target_team: Team): @classmethod def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id: int): from posthog.models.group.sql import SELECT_GROUPS_OF_TEAM - from posthog.models.person.sql import SELECT_PERSON_DISTINCT_ID2S_OF_TEAM, SELECT_PERSONS_OF_TEAM + from posthog.models.person.sql import ( + SELECT_PERSON_DISTINCT_ID2S_OF_TEAM, + SELECT_PERSONS_OF_TEAM, + ) list_params = {"source_team_id": source_team_id} # Persons @@ -220,7 +252,11 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id person_uuid = row.pop("person_uuid") try: bulk_person_distinct_ids.append( - PersonDistinctId(team_id=target_team_id, person_id=bulk_persons[person_uuid].pk, **row) + PersonDistinctId( + team_id=target_team_id, + person_id=bulk_persons[person_uuid].pk, + **row, + ) ) except KeyError: pre_existing_id_count -= 1 @@ -232,7 +268,14 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id bulk_groups = [] for row in clickhouse_groups: group_properties = json.loads(row.pop("group_properties", "{}")) - bulk_groups.append(Group(team_id=target_team_id, version=0, group_properties=group_properties, **row)) + bulk_groups.append( + Group( + team_id=target_team_id, + version=0, + group_properties=group_properties, + **row, + ) + ) try: Group.objects.bulk_create(bulk_groups) except IntegrityError as e: @@ -241,16 +284,24 @@ def _sync_postgres_with_clickhouse_data(cls, source_team_id: int, target_team_id def _save_sim_person(self, team: Team, subject: SimPerson): # We only want to save directly if there 
are past events if subject.past_events: - from posthog.models.person.util import create_person, create_person_distinct_id + from posthog.models.person.util import ( + create_person, + create_person_distinct_id, + ) create_person( - uuid=str(subject.in_posthog_id), team_id=team.pk, properties=subject.properties_at_now, version=0 + uuid=str(subject.in_posthog_id), + team_id=team.pk, + properties=subject.properties_at_now, + version=0, ) self._persons_created += 1 self._person_distinct_ids_created += len(subject.distinct_ids_at_now) for distinct_id in subject.distinct_ids_at_now: create_person_distinct_id( - team_id=team.pk, distinct_id=str(distinct_id), person_id=str(subject.in_posthog_id) + team_id=team.pk, + distinct_id=str(distinct_id), + person_id=str(subject.in_posthog_id), ) self._save_past_sim_events(team, subject.past_events) # We only want to queue future events if there are any @@ -294,14 +345,21 @@ def _save_future_sim_events(team: Team, events: List[SimEvent]): @staticmethod def _save_sim_group( - team: Team, type_index: Literal[0, 1, 2, 3, 4], key: str, properties: Dict[str, Any], timestamp: dt.datetime + team: Team, + type_index: Literal[0, 1, 2, 3, 4], + key: str, + properties: Dict[str, Any], + timestamp: dt.datetime, ): from posthog.models.group.util import raw_create_group_ch raw_create_group_ch(team.pk, type_index, key, properties, timestamp) def _sleep_until_person_data_in_clickhouse(self, team_id: int): - from posthog.models.person.sql import GET_PERSON_COUNT_FOR_TEAM, GET_PERSON_DISTINCT_ID2_COUNT_FOR_TEAM + from posthog.models.person.sql import ( + GET_PERSON_COUNT_FOR_TEAM, + GET_PERSON_DISTINCT_ID2_COUNT_FOR_TEAM, + ) while True: person_count: int = sync_execute(GET_PERSON_COUNT_FOR_TEAM, {"team_id": team_id})[0][0] diff --git a/posthog/demo/matrix/matrix.py b/posthog/demo/matrix/matrix.py index 1a080057a0ceb..d94988bc4210d 100644 --- a/posthog/demo/matrix/matrix.py +++ b/posthog/demo/matrix/matrix.py @@ -72,7 +72,12 @@ def __init__(self, *, 
index: int, matrix: "Matrix") -> None: self.radius = int(self.MIN_RADIUS + self.radius_distribution() * (self.MAX_RADIUS - self.MIN_RADIUS)) self.people_matrix = [ [ - matrix.PERSON_CLASS(kernel=(x == self.radius and y == self.radius), x=x, y=y, cluster=self) + matrix.PERSON_CLASS( + kernel=(x == self.radius and y == self.radius), + x=x, + y=y, + cluster=self, + ) for x in range(1 + self.radius * 2) ] for y in range(1 + self.radius * 2) diff --git a/posthog/demo/matrix/models.py b/posthog/demo/matrix/models.py index fbb1dff7f98ff..a2e7796518914 100644 --- a/posthog/demo/matrix/models.py +++ b/posthog/demo/matrix/models.py @@ -181,7 +181,11 @@ class SimBrowserClient(SimClient): def __init__(self, person: "SimPerson"): self.person = person self.matrix = person.cluster.matrix - self.device_type, self.os, self.browser = self.person.cluster.properties_provider.device_type_os_browser() + ( + self.device_type, + self.os, + self.browser, + ) = self.person.cluster.properties_provider.device_type_os_browser() self.device_id = str(UUID(int=self.person.cluster.random.getrandbits(128))) self.active_distinct_id = self.device_id # Pre-`$identify`, the device ID is used as the distinct ID self.active_session_id = None @@ -223,7 +227,10 @@ def capture(self, event: str, properties: Optional[Properties] = None): if properties: if referrer := properties.get("$referrer"): referring_domain = urlparse(referrer).netloc if referrer != "$direct" else referrer - referrer_properties = {"$referrer": referrer, "$referring_domain": referring_domain} + referrer_properties = { + "$referrer": referrer, + "$referring_domain": referring_domain, + } self.register(referrer_properties) combined_properties["$set"].update(referrer_properties) combined_properties["$referring_domain"] = referring_domain @@ -235,7 +242,11 @@ def capture(self, event: str, properties: Optional[Properties] = None): super()._capture_raw(event, combined_properties, distinct_id=self.active_distinct_id) def capture_pageview( - 
self, current_url: str, properties: Optional[Properties] = None, *, referrer: Optional[str] = None + self, + current_url: str, + properties: Optional[Properties] = None, + *, + referrer: Optional[str] = None, ): """Capture a $pageview event. $pageleave is handled implicitly.""" if self.current_url is not None: @@ -259,14 +270,24 @@ def identify(self, distinct_id: Optional[str], set_properties: Optional[Properti self.active_distinct_id = distinct_id self.capture(EVENT_IDENTIFY, identify_properties) - def group(self, group_type: str, group_key: str, set_properties: Optional[Properties] = None): + def group( + self, + group_type: str, + group_key: str, + set_properties: Optional[Properties] = None, + ): """Link the person to the specified group. Similar to JS `posthog.group()`.""" if set_properties is None: set_properties = {} self.person._groups[group_type] = group_key self.person.cluster.matrix._update_group(group_type, group_key, set_properties) self.capture( - EVENT_GROUP_IDENTIFY, {"$group_type": group_type, "$group_key": group_key, "$group_set": set_properties} + EVENT_GROUP_IDENTIFY, + { + "$group_type": group_type, + "$group_key": group_key, + "$group_set": set_properties, + }, ) def reset(self): @@ -404,7 +425,13 @@ def schedule_effect( An effect is a function that runs on the person, so it can change the person's state.""" self.cluster.raw_schedule_effect( - Effect(timestamp=timestamp, callback=callback, source=self, target=target, condition=condition) + Effect( + timestamp=timestamp, + callback=callback, + source=self, + target=target, + condition=condition, + ) ) # Person state @@ -423,7 +450,14 @@ def move_attribute(self, attr: str, delta: float) -> Literal[True]: setattr(self, attr, getattr(self, attr) + delta) return True - def _append_event(self, event: str, properties: Properties, *, distinct_id: str, timestamp: dt.datetime): + def _append_event( + self, + event: str, + properties: Properties, + *, + distinct_id: str, + timestamp: dt.datetime, + ): 
"""Append event to `past_events` or `future_events`, whichever is appropriate.""" if self.in_posthog_id is None: self.in_posthog_id = self.cluster.roll_uuidt() diff --git a/posthog/demo/matrix/randomization.py b/posthog/demo/matrix/randomization.py index c0d6a8edb6bb3..ca6bcfd588640 100644 --- a/posthog/demo/matrix/randomization.py +++ b/posthog/demo/matrix/randomization.py @@ -23,19 +23,28 @@ class Industry(str, Enum): class PropertiesProvider(mimesis.BaseProvider): # Somewhat realistically segmented and weighted pools for random properties: device type/OS/browser - DEVICE_TYPE_WEIGHTED_POOL: WeightedPool = (["Desktop", "Mobile", "Tablet"], [8, 1, 1]) + DEVICE_TYPE_WEIGHTED_POOL: WeightedPool = ( + ["Desktop", "Mobile", "Tablet"], + [8, 1, 1], + ) OS_WEIGHTED_POOLS: Dict[str, WeightedPool] = { "Desktop": (["Windows", "Mac OS X", "Linux", "Chrome OS"], [18, 16, 7, 1]), "Mobile": (["iOS", "Android"], [1, 1]), "Tablet": (["iOS", "Android"], [1, 1]), } BROWSER_WEIGHTED_POOLS: Dict[str, WeightedPool] = { - "Windows": (["Chrome", "Firefox", "Opera", "Microsoft Edge", "Internet Explorer"], [12, 4, 2, 1, 1]), + "Windows": ( + ["Chrome", "Firefox", "Opera", "Microsoft Edge", "Internet Explorer"], + [12, 4, 2, 1, 1], + ), "Mac OS X": (["Chrome", "Firefox", "Opera", "Safari"], [4, 2, 1, 2]), "Linux": (["Chrome", "Firefox", "Opera"], [3, 3, 1]), "Chrome OS": (["Chrome"], [1]), "iOS": (["Mobile Safari", "Chrome iOS", "Firefox iOS"], [8, 1, 1]), - "Android": (["Chrome", "Android Mobile", "Samsung Internet", "Firefox"], [5, 3, 3, 1]), + "Android": ( + ["Chrome", "Android Mobile", "Samsung Internet", "Firefox"], + [5, 3, 3, 1], + ), } INDUSTRY_POOL = ( diff --git a/posthog/demo/products/hedgebox/matrix.py b/posthog/demo/products/hedgebox/matrix.py index ed863556ecbf4..5c169ad0afd28 100644 --- a/posthog/demo/products/hedgebox/matrix.py +++ b/posthog/demo/products/hedgebox/matrix.py @@ -4,7 +4,13 @@ from django.db import IntegrityError -from posthog.constants import INSIGHT_TRENDS, 
PAGEVIEW_EVENT, RETENTION_FIRST_TIME, TRENDS_LINEAR, TRENDS_WORLD_MAP +from posthog.constants import ( + INSIGHT_TRENDS, + PAGEVIEW_EVENT, + RETENTION_FIRST_TIME, + TRENDS_LINEAR, + TRENDS_WORLD_MAP, +) from posthog.demo.matrix.matrix import Cluster, Matrix from posthog.demo.matrix.randomization import Industry from posthog.models import ( @@ -46,7 +52,8 @@ def __init__(self, *args, **kwargs): is_company = self.random.random() < COMPANY_CLUSTERS_PROPORTION if is_company: self.company = HedgdboxCompany( - name=self.finance_provider.company(), industry=self.properties_provider.industry() + name=self.finance_provider.company(), + industry=self.properties_provider.industry(), ) else: self.company = None @@ -81,7 +88,10 @@ def set_project_up(self, team, user): # Actions interacted_with_file_action = Action.objects.create( - name="Interacted with file", team=team, description="Logged-in interaction with a file.", created_by=user + name="Interacted with file", + team=team, + description="Logged-in interaction with a file.", + created_by=user, ) ActionStep.objects.bulk_create( ( @@ -97,7 +107,18 @@ def set_project_up(self, team, user): team=team, name="Signed-up users", created_by=user, - groups=[{"properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "type": "person", + "value": "is_set", + "operator": "is_set", + } + ] + } + ], ) real_users_cohort = Cohort.objects.create( team=team, @@ -105,14 +126,26 @@ def set_project_up(self, team, user): description="People who don't belong to the Hedgebox team.", created_by=user, groups=[ - {"properties": [{"key": "email", "type": "person", "value": "@hedgebox.net$", "operator": "not_regex"}]} + { + "properties": [ + { + "key": "email", + "type": "person", + "value": "@hedgebox.net$", + "operator": "not_regex", + } + ] + } ], ) team.test_account_filters = [{"key": "id", "type": "cohort", "value": real_users_cohort.pk}] # Dashboard: Key 
metrics (project home) key_metrics_dashboard = Dashboard.objects.create( - team=team, name="🔑 Key metrics", description="Company overview.", pinned=True + team=team, + name="🔑 Key metrics", + description="Company overview.", + pinned=True, ) team.primary_dashboard = key_metrics_dashboard weekly_signups_insight = Insight.objects.create( @@ -137,7 +170,16 @@ def set_project_up(self, team, user): color="blue", layouts={ "sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 0, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) signups_by_country_insight = Insight.objects.create( @@ -162,7 +204,16 @@ def set_project_up(self, team, user): insight=signups_by_country_insight, layouts={ "sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 5, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) activation_funnel = Insight.objects.create( @@ -210,7 +261,16 @@ def set_project_up(self, team, user): insight=activation_funnel, layouts={ "sm": {"h": 5, "w": 6, "x": 0, "y": 5, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 10, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 10, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) new_user_retention = Insight.objects.create( @@ -227,11 +287,23 @@ def set_project_up(self, team, user): "values": [ { "type": "AND", - "values": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + "values": [ + { + "key": "email", + "type": "person", + "value": "is_set", + "operator": "is_set", + } + ], } ], }, - "target_entity": {"id": EVENT_SIGNED_UP, "name": EVENT_SIGNED_UP, "type": "events", 
"order": 0}, + "target_entity": { + "id": EVENT_SIGNED_UP, + "name": EVENT_SIGNED_UP, + "type": "events", + "order": 0, + }, "retention_type": RETENTION_FIRST_TIME, "total_intervals": 9, "returning_entity": { @@ -249,7 +321,16 @@ def set_project_up(self, team, user): insight=new_user_retention, layouts={ "sm": {"h": 5, "w": 6, "x": 6, "y": 5, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 15, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 15, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) active_user_lifecycle = Insight.objects.create( @@ -287,7 +368,16 @@ def set_project_up(self, team, user): insight=active_user_lifecycle, layouts={ "sm": {"h": 5, "w": 6, "x": 0, "y": 10, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 20, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 20, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) weekly_file_volume = Insight.objects.create( @@ -333,7 +423,16 @@ def set_project_up(self, team, user): insight=weekly_file_volume, layouts={ "sm": {"h": 5, "w": 6, "x": 6, "y": 10, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 25, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 25, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) @@ -346,7 +445,13 @@ def set_project_up(self, team, user): name="Monthly app revenue", filters={ "events": [ - {"id": EVENT_PAID_BILL, "type": "events", "order": 0, "math": "sum", "math_property": "amount_usd"} + { + "id": EVENT_PAID_BILL, + "type": "events", + "order": 0, + "math": "sum", + "math_property": "amount_usd", + } ], "actions": [], "display": TRENDS_LINEAR, @@ -362,7 +467,16 @@ def set_project_up(self, team, user): insight=monthly_app_revenue_trends, layouts={ "sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3}, - "xs": 
{"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 0, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) bills_paid_trends = Insight.objects.create( @@ -399,7 +513,16 @@ def set_project_up(self, team, user): insight=bills_paid_trends, layouts={ "sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 5, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) @@ -426,7 +549,16 @@ def set_project_up(self, team, user): insight=daily_unique_visitors_trends, layouts={ "sm": {"h": 5, "w": 6, "x": 0, "y": 0, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 0, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 0, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) most_popular_pages_trends = Insight.objects.create( @@ -435,7 +567,14 @@ def set_project_up(self, team, user): saved=True, name="Most popular pages", filters={ - "events": [{"id": PAGEVIEW_EVENT, "math": "total", "type": "events", "order": 0}], + "events": [ + { + "id": PAGEVIEW_EVENT, + "math": "total", + "type": "events", + "order": 0, + } + ], "actions": [], "display": "ActionsTable", "insight": "TRENDS", @@ -469,7 +608,16 @@ def set_project_up(self, team, user): insight=most_popular_pages_trends, layouts={ "sm": {"h": 5, "w": 6, "x": 6, "y": 0, "minH": 5, "minW": 3}, - "xs": {"h": 5, "w": 1, "x": 0, "y": 5, "minH": 5, "minW": 3, "moved": False, "static": False}, + "xs": { + "h": 5, + "w": 1, + "x": 0, + "y": 5, + "minH": 5, + "minW": 3, + "moved": False, + "static": False, + }, }, ) @@ -487,7 +635,12 @@ def set_project_up(self, team, user): "type": "events", "order": 0, "properties": [ - {"key": "$current_url", "type": "event", "value": URL_HOME, "operator": 
"exact"} + { + "key": "$current_url", + "type": "event", + "value": URL_HOME, + "operator": "exact", + } ], }, { @@ -497,10 +650,21 @@ def set_project_up(self, team, user): "type": "events", "order": 1, "properties": [ - {"key": "$current_url", "type": "event", "value": URL_SIGNUP, "operator": "regex"} + { + "key": "$current_url", + "type": "event", + "value": URL_SIGNUP, + "operator": "regex", + } ], }, - {"custom_name": "Signed up", "id": "signed_up", "name": "signed_up", "type": "events", "order": 2}, + { + "custom_name": "Signed up", + "id": "signed_up", + "name": "signed_up", + "type": "events", + "order": 2, + }, ], "actions": [], "display": "FunnelViz", @@ -563,7 +727,11 @@ def set_project_up(self, team, user): user=user, insight=insight, last_viewed_at=( - self.now - dt.timedelta(days=self.random.randint(0, 3), minutes=self.random.randint(5, 60)) + self.now + - dt.timedelta( + days=self.random.randint(0, 3), + minutes=self.random.randint(5, 60), + ) ), ) for insight in Insight.objects.filter(team=team) @@ -610,8 +778,14 @@ def set_project_up(self, team, user): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "control", "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT}, - {"key": "test", "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT}, + { + "key": "control", + "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT, + }, + { + "key": "test", + "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT, + }, ] }, }, @@ -632,10 +806,20 @@ def set_project_up(self, team, user): "type": "events", "order": 0, "properties": [ - {"key": "$current_url", "type": "event", "value": URL_SIGNUP, "operator": "exact"} + { + "key": "$current_url", + "type": "event", + "value": URL_SIGNUP, + "operator": "exact", + } ], }, - {"id": "signed_up", "name": "signed_up", "type": "events", "order": 1}, + { + "id": "signed_up", + "name": "signed_up", + "type": "events", + "order": 1, + }, ], 
"actions": [], "display": "FunnelViz", @@ -646,8 +830,14 @@ def set_project_up(self, team, user): }, parameters={ "feature_flag_variants": [ - {"key": "control", "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT}, - {"key": "test", "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT}, + { + "key": "control", + "rollout_percentage": 100 - NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT, + }, + { + "key": "test", + "rollout_percentage": NEW_SIGNUP_PAGE_FLAG_ROLLOUT_PERCENT, + }, ], "recommended_sample_size": int(len(self.clusters) * 0.274), "recommended_running_time": None, diff --git a/posthog/demo/products/hedgebox/models.py b/posthog/demo/products/hedgebox/models.py index 132f3d6ac5f32..1c0a0e4ffd0da 100644 --- a/posthog/demo/products/hedgebox/models.py +++ b/posthog/demo/products/hedgebox/models.py @@ -279,9 +279,18 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]: # The more files, the more likely to delete/download/share rather than upload possible_intents_with_weights.extend( [ - (HedgeboxSessionIntent.DELETE_FILE_S, math.log10(file_count) / 8 if file_count else 0), - (HedgeboxSessionIntent.DOWNLOAD_OWN_FILE_S, math.log10(file_count + 1) if file_count else 0), - (HedgeboxSessionIntent.SHARE_FILE, math.log10(file_count) / 3 if file_count else 0), + ( + HedgeboxSessionIntent.DELETE_FILE_S, + math.log10(file_count) / 8 if file_count else 0, + ), + ( + HedgeboxSessionIntent.DOWNLOAD_OWN_FILE_S, + math.log10(file_count + 1) if file_count else 0, + ), + ( + HedgeboxSessionIntent.SHARE_FILE, + math.log10(file_count) / 3 if file_count else 0, + ), ] ) if self.account.allocation_used_fraction < 0.99: @@ -304,7 +313,8 @@ def determine_session_intent(self) -> Optional[HedgeboxSessionIntent]: if possible_intents_with_weights: possible_intents, weights = zip(*possible_intents_with_weights) return self.cluster.random.choices( - cast(Tuple[HedgeboxSessionIntent], possible_intents), cast(Tuple[float], weights) + 
cast(Tuple[HedgeboxSessionIntent], possible_intents), + cast(Tuple[float], weights), )[0] else: return None @@ -526,7 +536,10 @@ def go_to_shared_file(self, file: HedgeboxFile): self.active_client.capture_pageview(dyn_url_file(file.id)) self.advance_timer(0.5 + self.cluster.random.betavariate(1.2, 1.6) * 20) if self.cluster.random.random() < 0.7: - self.active_client.capture(EVENT_DOWNLOADED_FILE, {"file_type": file.type, "file_size_b": file.size_b}) + self.active_client.capture( + EVENT_DOWNLOADED_FILE, + {"file_type": file.type, "file_size_b": file.size_b}, + ) self.advance_timer(0.5 + self.cluster.random.betavariate(1.2, 2) * 80) self.need += (self.cluster.random.betavariate(1.2, 1) - 0.5) * 0.08 if self.cluster.random.random() < 0.2: @@ -537,13 +550,20 @@ def go_to_account_settings(self): self.advance_timer(1 + self.cluster.random.betavariate(1.2, 1.2) * 5) random = self.cluster.random.random() if ( - self.active_session_intent in (HedgeboxSessionIntent.UPGRADE_PLAN, HedgeboxSessionIntent.DOWNGRADE_PLAN) + self.active_session_intent + in ( + HedgeboxSessionIntent.UPGRADE_PLAN, + HedgeboxSessionIntent.DOWNGRADE_PLAN, + ) or random < 0.1 ): self.go_to_account_billing() elif ( self.active_session_intent - in (HedgeboxSessionIntent.INVITE_TEAM_MEMBER, HedgeboxSessionIntent.REMOVE_TEAM_MEMBER) + in ( + HedgeboxSessionIntent.INVITE_TEAM_MEMBER, + HedgeboxSessionIntent.REMOVE_TEAM_MEMBER, + ) or random < 0.1 ): self.go_to_account_team() @@ -609,7 +629,11 @@ def join_team(self): raise ValueError("Cannot join team without an account") self.active_client.capture(EVENT_SIGNED_UP, {"from_invite": True}) self.advance_timer(self.cluster.random.uniform(0.1, 0.2)) - self.active_client.group(GROUP_TYPE_ACCOUNT, self.account.id, {"team_size": len(self.account.team_members)}) + self.active_client.group( + GROUP_TYPE_ACCOUNT, + self.account.id, + {"team_size": len(self.account.team_members)}, + ) self.account.team_members.add(self) def upload_file(self, file: HedgeboxFile): @@ 
-618,12 +642,19 @@ def upload_file(self, file: HedgeboxFile): self.account.files.add(file) self.active_client.capture( EVENT_UPLOADED_FILE, - properties={"file_type": file.type, "file_size_b": file.size_b, "used_mb": self.account.current_used_mb}, + properties={ + "file_type": file.type, + "file_size_b": file.size_b, + "used_mb": self.account.current_used_mb, + }, ) self.active_client.group( GROUP_TYPE_ACCOUNT, self.account.id, - {"used_mb": self.account.current_used_mb, "file_count": len(self.account.files)}, + { + "used_mb": self.account.current_used_mb, + "file_count": len(self.account.files), + }, ) self.satisfaction += self.cluster.random.uniform(-0.19, 0.2) if self.satisfaction > 0.9: @@ -643,7 +674,10 @@ def delete_file(self, file: HedgeboxFile): self.active_client.group( GROUP_TYPE_ACCOUNT, self.account.id, - {"used_mb": self.account.current_used_mb, "file_count": len(self.account.files)}, + { + "used_mb": self.account.current_used_mb, + "file_count": len(self.account.files), + }, ) def share_file(self, file: HedgeboxFile): @@ -662,7 +696,8 @@ def upgrade_plan(self): if new_plan is None: raise ValueError("There's no successor plan") self.active_client.capture( - EVENT_UPGRADED_PLAN, {"previous_plan": str(previous_plan), "new_plan": str(new_plan)} + EVENT_UPGRADED_PLAN, + {"previous_plan": str(previous_plan), "new_plan": str(new_plan)}, ) self.advance_timer(self.cluster.random.betavariate(1.2, 1.2) * 2) self.schedule_effect( @@ -678,7 +713,11 @@ def upgrade_plan(self): ) for i in range(future_months): bill_timestamp = self.cluster.simulation_time + dt.timedelta(days=30 * i) - self.schedule_effect(bill_timestamp, lambda person: person.bill_account(), Effect.Target.SELF) + self.schedule_effect( + bill_timestamp, + lambda person: person.bill_account(), + Effect.Target.SELF, + ) def downgrade_plan(self): assert self.account is not None @@ -687,7 +726,8 @@ def downgrade_plan(self): if new_plan is None: raise ValueError("There's no predecessor plan") 
self.active_client.capture( - EVENT_DOWNGRADED_PLAN, {"previous_plan": str(previous_plan), "new_plan": str(new_plan)} + EVENT_DOWNGRADED_PLAN, + {"previous_plan": str(previous_plan), "new_plan": str(new_plan)}, ) self.account.plan = new_plan @@ -716,7 +756,10 @@ def bill_account(self): if self.account and self.account.current_monthly_bill_usd: self.cluster.matrix.server_client.capture( EVENT_PAID_BILL, - {"amount_usd": self.account.current_monthly_bill_usd, "plan": self.account.plan}, + { + "amount_usd": self.account.current_monthly_bill_usd, + "plan": self.account.plan, + }, distinct_id=self.in_product_id, ) diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py index 99f0451c5485d..25770553ab613 100644 --- a/posthog/demo/test/test_matrix_manager.py +++ b/posthog/demo/test/test_matrix_manager.py @@ -55,7 +55,9 @@ class TestMatrixManager(ClickhouseDestroyTablesMixin): def setUpTestData(cls): super().setUpTestData() cls.matrix = DummyMatrix( - n_clusters=3, now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), days_future=0 + n_clusters=3, + now=dt.datetime(2020, 1, 1, 0, 0, 0, 0, tzinfo=ZoneInfo("UTC")), + days_future=0, ) cls.matrix.simulate() @@ -83,7 +85,10 @@ def test_run_on_team(self): # At least one event for each cluster assert ( - sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk})[0][0] >= 3 + sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[ + 0 + ][0] + >= 3 ) assert self.team.name == DummyMatrix.PRODUCT_NAME @@ -95,5 +100,8 @@ def test_run_on_team_using_pre_save(self): # At least one event for each cluster assert sync_execute("SELECT count() FROM events WHERE team_id = 0")[0][0] >= 3 assert ( - sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk})[0][0] >= 3 + sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[ + 0 + 
][0] + >= 3 ) diff --git a/posthog/email.py b/posthog/email.py index 579d68cb0e0ac..93968b6c07844 100644 --- a/posthog/email.py +++ b/posthog/email.py @@ -59,7 +59,6 @@ def _send_email( records: List = [] with transaction.atomic(): - for dest in to: record, _ = MessagingRecord.objects.get_or_create(raw_email=dest["raw_email"], campaign_key=campaign_key) @@ -113,7 +112,11 @@ def _send_email( try: connection.close() # type: ignore except Exception as err: - print("Could not close email connection (this can be ignored):", err, file=sys.stderr) + print( + "Could not close email connection (this can be ignored):", + err, + file=sys.stderr, + ) class EmailMessage: diff --git a/posthog/event_usage.py b/posthog/event_usage.py index 7d238a29738e6..fa69f0c23662b 100644 --- a/posthog/event_usage.py +++ b/posthog/event_usage.py @@ -93,7 +93,8 @@ def report_user_joined_organization(organization: Organization, current_user: Us def report_user_logged_in( - user: User, social_provider: str = "" # which third-party provider processed the login (empty = no third-party) + user: User, + social_provider: str = "", # which third-party provider processed the login (empty = no third-party) ) -> None: """ Reports that a user has logged in to PostHog. @@ -125,7 +126,9 @@ def report_user_password_reset(user: User) -> None: Reports a user resetting their password. 
""" posthoganalytics.capture( - user.distinct_id, "user password reset", groups=groups(user.current_organization, user.current_team) + user.distinct_id, + "user password reset", + groups=groups(user.current_organization, user.current_team), ) @@ -195,13 +198,19 @@ def report_bulk_invited( def report_user_action(user: User, event: str, properties: Dict = {}): posthoganalytics.capture( - user.distinct_id, event, properties=properties, groups=groups(user.current_organization, user.current_team) + user.distinct_id, + event, + properties=properties, + groups=groups(user.current_organization, user.current_team), ) def report_organization_deleted(user: User, organization: Organization): posthoganalytics.capture( - user.distinct_id, "organization deleted", organization.get_analytics_metadata(), groups=groups(organization) + user.distinct_id, + "organization deleted", + organization.get_analytics_metadata(), + groups=groups(organization), ) @@ -219,7 +228,12 @@ def groups(organization: Optional[Organization] = None, team: Optional[Team] = N return result -def report_team_action(team: Team, event: str, properties: Dict = {}, group_properties: Optional[Dict] = None): +def report_team_action( + team: Team, + event: str, + properties: Dict = {}, + group_properties: Optional[Dict] = None, +): """ For capturing events where it is unclear which user was the core actor we can use the team instead """ @@ -230,13 +244,19 @@ def report_team_action(team: Team, event: str, properties: Dict = {}, group_prop def report_organization_action( - organization: Organization, event: str, properties: Dict = {}, group_properties: Optional[Dict] = None + organization: Organization, + event: str, + properties: Dict = {}, + group_properties: Optional[Dict] = None, ): """ For capturing events where it is unclear which user was the core actor we can use the organization instead """ posthoganalytics.capture( - str(organization.id), event, properties=properties, groups=groups(organization=organization) + 
str(organization.id), + event, + properties=properties, + groups=groups(organization=organization), ) if group_properties: diff --git a/posthog/exceptions.py b/posthog/exceptions.py index 1cdcc5f1bf957..a38b334b566fb 100644 --- a/posthog/exceptions.py +++ b/posthog/exceptions.py @@ -75,6 +75,7 @@ def generate_exception_response( from statshog.defaults.django import statsd statsd.incr( - f"posthog_cloud_raw_endpoint_exception", tags={"endpoint": endpoint, "code": code, "type": type, "attr": attr} + f"posthog_cloud_raw_endpoint_exception", + tags={"endpoint": endpoint, "code": code, "type": type, "attr": attr}, ) return JsonResponse({"type": type, "code": code, "detail": detail, "attr": attr}, status=status_code) diff --git a/posthog/filters.py b/posthog/filters.py index 0e8bb86cae7c6..ac098dea92c68 100644 --- a/posthog/filters.py +++ b/posthog/filters.py @@ -33,7 +33,12 @@ def get_search_terms(self, request: Request): terms = terms.replace("\x00", "") # strip null characters return list(filter(None, terms.split(" "))) - def filter_queryset(self, request: Request, queryset: Union[QuerySet[_MT], RawQuerySet], view: APIView): + def filter_queryset( + self, + request: Request, + queryset: Union[QuerySet[_MT], RawQuerySet], + view: APIView, + ): if isinstance(queryset, RawQuerySet): return queryset @@ -54,7 +59,9 @@ def filter_queryset(self, request: Request, queryset: Union[QuerySet[_MT], RawQu def term_search_filter_sql( - search_fields: List[str], search_terms: Optional[str] = "", search_extra: Optional[str] = "" + search_fields: List[str], + search_terms: Optional[str] = "", + search_extra: Optional[str] = "", ) -> Tuple[str, dict]: if not search_fields or not search_terms: return "", {} diff --git a/posthog/health.py b/posthog/health.py index 782fab5a2942a..a77e4d79718f3 100644 --- a/posthog/health.py +++ b/posthog/health.py @@ -59,7 +59,13 @@ # NOTE: we can be pretty picky about what the worker needs as by its nature # of reading from a durable queue rather that 
being required to perform # request/response, we are more resilient to service downtime. - "worker": ["http", "postgres", "postgres_migrations_uptodate", "clickhouse", "celery_broker"], + "worker": [ + "http", + "postgres", + "postgres_migrations_uptodate", + "clickhouse", + "celery_broker", + ], "decide": ["http"], } diff --git a/posthog/helpers/dashboard_templates.py b/posthog/helpers/dashboard_templates.py index 7a1179b3af5d3..cfaa2bac5e1d1 100644 --- a/posthog/helpers/dashboard_templates.py +++ b/posthog/helpers/dashboard_templates.py @@ -39,7 +39,9 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: dashboard.filters = {DATE_FROM: "-30d"} if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING): tag, _ = Tag.objects.get_or_create( - name="marketing", team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id} + name="marketing", + team_id=dashboard.team_id, + defaults={"team_id": dashboard.team_id}, ) dashboard.tagged_items.create(tag_id=tag.id) dashboard.save(update_fields=["filters"]) @@ -50,7 +52,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: name="Website Unique Users (Total)", description="Shows the number of unique users that use your app every day.", filters={ - TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}], + TREND_FILTER_TYPE_EVENTS: [ + { + "id": "$pageview", + "math": UNIQUE_USERS, + "type": TREND_FILTER_TYPE_EVENTS, + } + ], INTERVAL: "day", INSIGHT: INSIGHT_TRENDS, DATE_FROM: "-30d", @@ -77,7 +85,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: name="Organic SEO Unique Users (Total)", description="", filters={ - TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}], + TREND_FILTER_TYPE_EVENTS: [ + { + "id": "$pageview", + "math": UNIQUE_USERS, + "type": TREND_FILTER_TYPE_EVENTS, + } + ], INTERVAL: "day", INSIGHT: INSIGHT_TRENDS, DATE_FROM: "-30d", @@ -89,8 +103,18 
@@ def _create_website_dashboard(dashboard: Dashboard) -> None: { "type": "AND", "values": [ - {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, - {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"}, + { + "key": "$referring_domain", + "type": "event", + "value": "google", + "operator": "icontains", + }, + { + "key": "utm_source", + "type": "event", + "value": "is_not_set", + "operator": "is_not_set", + }, ], } ], @@ -117,7 +141,13 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: name="Website Unique Users (Breakdown)", description="", filters={ - TREND_FILTER_TYPE_EVENTS: [{"id": "$pageview", "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS}], + TREND_FILTER_TYPE_EVENTS: [ + { + "id": "$pageview", + "math": UNIQUE_USERS, + "type": TREND_FILTER_TYPE_EVENTS, + } + ], INTERVAL: "week", INSIGHT: INSIGHT_TRENDS, DATE_FROM: "-30d", @@ -149,8 +179,18 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: "math": UNIQUE_USERS, "type": TREND_FILTER_TYPE_EVENTS, PROPERTIES: [ - {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, - {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"}, + { + "key": "$referring_domain", + "type": "event", + "value": "google", + "operator": "icontains", + }, + { + "key": "utm_source", + "type": "event", + "value": "is_not_set", + "operator": "is_not_set", + }, ], } ], @@ -283,7 +323,14 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: "values": [ { "type": "AND", - "values": [{"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"}], + "values": [ + { + "key": "$current_url", + "type": "event", + "value": "?", + "operator": "not_icontains", + } + ], } ], }, @@ -329,8 +376,18 @@ def _create_website_dashboard(dashboard: Dashboard) -> None: { "type": "AND", "values": [ - {"key": "$current_url", "type": "event", "value": "?", 
"operator": "not_icontains"}, - {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, + { + "key": "$current_url", + "type": "event", + "value": "?", + "operator": "not_icontains", + }, + { + "key": "$referring_domain", + "type": "event", + "value": "google", + "operator": "icontains", + }, ], } ], @@ -403,7 +460,9 @@ def create_from_template(dashboard: Dashboard, template: DashboardTemplate) -> N if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING): for template_tag in template.tags or []: tag, _ = Tag.objects.get_or_create( - name=template_tag, team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id} + name=template_tag, + team_id=dashboard.team_id, + defaults={"team_id": dashboard.team_id}, ) dashboard.tagged_items.create(tag_id=tag.id) dashboard.save() @@ -490,7 +549,9 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: dashboard.filters = {DATE_FROM: "-30d"} if dashboard.team.organization.is_feature_available(AvailableFeature.TAGGING): tag, _ = Tag.objects.get_or_create( - name="feature flags", team_id=dashboard.team_id, defaults={"team_id": dashboard.team_id} + name="feature flags", + team_id=dashboard.team_id, + defaults={"team_id": dashboard.team_id}, ) dashboard.tagged_items.create(tag_id=tag.id) dashboard.save(update_fields=["filters"]) @@ -502,7 +563,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: description="Shows the number of total calls made on feature flag with key: " + feature_flag.key, filters={ TREND_FILTER_TYPE_EVENTS: [ - {"id": "$feature_flag_called", "name": "$feature_flag_called", "type": TREND_FILTER_TYPE_EVENTS} + { + "id": "$feature_flag_called", + "name": "$feature_flag_called", + "type": TREND_FILTER_TYPE_EVENTS, + } ], INTERVAL: "day", INSIGHT: INSIGHT_TRENDS, @@ -514,7 +579,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: { "type": "AND", "values": [ - {"key": 
"$feature_flag", "type": "event", "value": feature_flag.key}, + { + "key": "$feature_flag", + "type": "event", + "value": feature_flag.key, + }, ], } ], @@ -562,7 +631,11 @@ def create_feature_flag_dashboard(feature_flag, dashboard: Dashboard) -> None: { "type": "AND", "values": [ - {"key": "$feature_flag", "type": "event", "value": feature_flag.key}, + { + "key": "$feature_flag", + "type": "event", + "value": feature_flag.key, + }, ], } ], @@ -595,7 +668,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das description="Shows the total number of times this feature was viewed and interacted with", filters={ TREND_FILTER_TYPE_EVENTS: [ - {"id": "$feature_view", "name": "Feature View - Total", "type": TREND_FILTER_TYPE_EVENTS}, + { + "id": "$feature_view", + "name": "Feature View - Total", + "type": TREND_FILTER_TYPE_EVENTS, + }, { "id": "$feature_view", "name": "Feature View - Unique users", @@ -613,7 +690,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das { "type": "AND", "values": [ - {"key": "feature_flag", "type": "event", "value": feature_flag.key}, + { + "key": "feature_flag", + "type": "event", + "value": feature_flag.key, + }, ], } ], @@ -630,7 +711,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das description="Shows the total number of times this feature was viewed and interacted with", filters={ TREND_FILTER_TYPE_EVENTS: [ - {"id": "$feature_interaction", "name": "Feature Interaction - Total", "type": TREND_FILTER_TYPE_EVENTS}, + { + "id": "$feature_interaction", + "name": "Feature Interaction - Total", + "type": TREND_FILTER_TYPE_EVENTS, + }, { "id": "$feature_interaction", "name": "Feature Interaction - Unique users", @@ -648,7 +733,11 @@ def add_enriched_insights_to_feature_flag_dashboard(feature_flag, dashboard: Das { "type": "AND", "values": [ - {"key": "feature_flag", "type": "event", "value": feature_flag.key}, + { + "key": "feature_flag", + 
"type": "event", + "value": feature_flag.key, + }, ], } ], diff --git a/posthog/helpers/tests/test_multi_property_breakdown.py b/posthog/helpers/tests/test_multi_property_breakdown.py index 311cd465ad9b8..cc2dad4bbc57f 100644 --- a/posthog/helpers/tests/test_multi_property_breakdown.py +++ b/posthog/helpers/tests/test_multi_property_breakdown.py @@ -1,7 +1,9 @@ from typing import Any, Dict, List from unittest import TestCase -from posthog.helpers.multi_property_breakdown import protect_old_clients_from_multi_property_default +from posthog.helpers.multi_property_breakdown import ( + protect_old_clients_from_multi_property_default, +) class TestMultiPropertyBreakdown(TestCase): @@ -15,7 +17,11 @@ def test_handles_empty_inputs(self): assert False, "should not raise any KeyError" def test_handles_empty_breakdowns_array(self): - data: Dict[str, Any] = {"breakdowns": [], "insight": "FUNNELS", "breakdown_type": "event"} + data: Dict[str, Any] = { + "breakdowns": [], + "insight": "FUNNELS", + "breakdown_type": "event", + } result: List = [] try: @@ -24,7 +30,11 @@ def test_handles_empty_breakdowns_array(self): assert False, "should not raise any KeyError" def test_keeps_multi_property_breakdown_for_multi_property_requests(self): - data: Dict[str, Any] = {"breakdowns": ["a", "b"], "insight": "FUNNELS", "breakdown_type": "event"} + data: Dict[str, Any] = { + "breakdowns": ["a", "b"], + "insight": "FUNNELS", + "breakdown_type": "event", + } result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1", "b1"], "breakdown_value": ["a1", "b1"]}]] actual = protect_old_clients_from_multi_property_default(data, result) @@ -38,7 +48,11 @@ def test_keeps_multi_property_breakdown_for_multi_property_requests(self): assert "breakdown" not in data def test_flattens_multi_property_breakdown_for_single_property_requests(self): - data: Dict[str, Any] = {"breakdown": "a", "insight": "FUNNELS", "breakdown_type": "event"} + data: Dict[str, Any] = { + "breakdown": "a", + "insight": "FUNNELS", + 
"breakdown_type": "event", + } result: List[List[Dict[str, Any]]] = [[{"breakdown": ["a1"], "breakdown_value": ["a1", "b1"]}]] actual = protect_old_clients_from_multi_property_default(data, result) diff --git a/posthog/hogql/ai.py b/posthog/hogql/ai.py index 915d03b77e49c..c53e9814d807a 100644 --- a/posthog/hogql/ai.py +++ b/posthog/hogql/ai.py @@ -85,7 +85,11 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t ] if current_query: messages.insert( - -1, {"role": "user", "content": CURRENT_QUERY_MESSAGE.format(current_query_input=current_query)} + -1, + { + "role": "user", + "content": CURRENT_QUERY_MESSAGE.format(current_query_input=current_query), + }, ) candidate_sql: Optional[str] = None @@ -116,7 +120,12 @@ def write_sql_from_prompt(prompt: str, *, current_query: Optional[str] = None, t print_ast(parse_select(candidate_sql), context=context, dialect="clickhouse") except HogQLException as e: messages.append({"role": "assistant", "content": candidate_sql}) - messages.append({"role": "user", "content": f"That query has this problem: {e}. Return fixed query."}) + messages.append( + { + "role": "user", + "content": f"That query has this problem: {e}. 
Return fixed query.", + } + ) else: generated_valid_hogql = True break diff --git a/posthog/hogql/bytecode.py b/posthog/hogql/bytecode.py index ab468338ca803..7a4a6904527b9 100644 --- a/posthog/hogql/bytecode.py +++ b/posthog/hogql/bytecode.py @@ -3,7 +3,11 @@ from posthog.hogql import ast from posthog.hogql.errors import NotImplementedException from posthog.hogql.visitor import Visitor -from hogvm.python.operation import Operation, HOGQL_BYTECODE_IDENTIFIER, SUPPORTED_FUNCTIONS +from hogvm.python.operation import ( + Operation, + HOGQL_BYTECODE_IDENTIFIER, + SUPPORTED_FUNCTIONS, +) COMPARE_OPERATIONS = { ast.CompareOperationOp.Eq: Operation.EQ, @@ -74,7 +78,11 @@ def visit_compare_operation(self, node: ast.CompareOperation): return [*self.visit(node.right), *self.visit(node.left), operation] def visit_arithmetic_operation(self, node: ast.ArithmeticOperation): - return [*self.visit(node.right), *self.visit(node.left), ARITHMETIC_OPERATIONS[node.op]] + return [ + *self.visit(node.right), + *self.visit(node.left), + ARITHMETIC_OPERATIONS[node.op], + ] def visit_field(self, node: ast.Field): chain = [] diff --git a/posthog/hogql/constants.py b/posthog/hogql/constants.py index cd08da81fca30..0a2806ca99878 100644 --- a/posthog/hogql/constants.py +++ b/posthog/hogql/constants.py @@ -4,7 +4,16 @@ from pydantic import ConfigDict, BaseModel ConstantDataType: TypeAlias = Literal[ - "int", "float", "str", "bool", "array", "tuple", "date", "datetime", "uuid", "unknown" + "int", + "float", + "str", + "bool", + "array", + "tuple", + "date", + "datetime", + "uuid", + "unknown", ] ConstantSupportedPrimitive: TypeAlias = int | float | str | bool | date | datetime | UUID | None ConstantSupportedData: TypeAlias = ( diff --git a/posthog/hogql/context.py b/posthog/hogql/context.py index 65c17ba7006be..7f45d66fa4f83 100644 --- a/posthog/hogql/context.py +++ b/posthog/hogql/context.py @@ -55,7 +55,11 @@ def add_sensitive_value(self, value: Any) -> str: return f"%({key})s" def add_notice( 
- self, message: str, start: Optional[int] = None, end: Optional[int] = None, fix: Optional[str] = None + self, + message: str, + start: Optional[int] = None, + end: Optional[int] = None, + fix: Optional[str] = None, ): if not any(n.start == start and n.end == end and n.message == message and n.fix == fix for n in self.notices): self.notices.append(HogQLNotice(start=start, end=end, message=message, fix=fix)) diff --git a/posthog/hogql/database/argmax.py b/posthog/hogql/database/argmax.py index a46b068513e6b..0302ac14ddb26 100644 --- a/posthog/hogql/database/argmax.py +++ b/posthog/hogql/database/argmax.py @@ -18,7 +18,12 @@ def argmax_select( fields_to_select: List[ast.Expr] = [] for name, chain in select_fields.items(): if name not in group_fields: - fields_to_select.append(ast.Alias(alias=name, expr=argmax_version(ast.Field(chain=[table_name] + chain)))) + fields_to_select.append( + ast.Alias( + alias=name, + expr=argmax_version(ast.Field(chain=[table_name] + chain)), + ) + ) for key in group_fields: fields_to_group.append(ast.Field(chain=[table_name, key])) fields_to_select.append(ast.Alias(alias=key, expr=ast.Field(chain=[table_name, key]))) diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py index 1e5b8dd2cc390..db2791c348d76 100644 --- a/posthog/hogql/database/database.py +++ b/posthog/hogql/database/database.py @@ -27,10 +27,19 @@ from posthog.hogql.database.schema.events import EventsTable from posthog.hogql.database.schema.groups import GroupsTable, RawGroupsTable from posthog.hogql.database.schema.numbers import NumbersTable -from posthog.hogql.database.schema.person_distinct_ids import PersonDistinctIdsTable, RawPersonDistinctIdsTable +from posthog.hogql.database.schema.person_distinct_ids import ( + PersonDistinctIdsTable, + RawPersonDistinctIdsTable, +) from posthog.hogql.database.schema.persons import PersonsTable, RawPersonsTable -from posthog.hogql.database.schema.person_overrides import PersonOverridesTable, 
RawPersonOverridesTable -from posthog.hogql.database.schema.session_replay_events import RawSessionReplayEventsTable, SessionReplayEventsTable +from posthog.hogql.database.schema.person_overrides import ( + PersonOverridesTable, + RawPersonOverridesTable, +) +from posthog.hogql.database.schema.session_replay_events import ( + RawSessionReplayEventsTable, + SessionReplayEventsTable, +) from posthog.hogql.database.schema.static_cohort_people import StaticCohortPeople from posthog.hogql.errors import HogQLException from posthog.models.group_type_mapping import GroupTypeMapping @@ -52,10 +61,10 @@ class Database(BaseModel): cohort_people: CohortPeople = CohortPeople() static_cohort_people: StaticCohortPeople = StaticCohortPeople() log_entries: LogEntriesTable = LogEntriesTable() - console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable() + console_logs_log_entries: (ReplayConsoleLogsLogEntriesTable) = ReplayConsoleLogsLogEntriesTable() batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable() - raw_session_replay_events: RawSessionReplayEventsTable = RawSessionReplayEventsTable() + raw_session_replay_events: (RawSessionReplayEventsTable) = RawSessionReplayEventsTable() raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable() raw_persons: RawPersonsTable = RawPersonsTable() raw_groups: RawGroupsTable = RawGroupsTable() @@ -111,7 +120,11 @@ def add_warehouse_tables(self, **field_definitions: Any): def create_hogql_database(team_id: int, modifiers: Optional[HogQLQueryModifiers] = None) -> Database: from posthog.models import Team from posthog.hogql.query import create_default_modifiers_for_team - from posthog.warehouse.models import DataWarehouseTable, DataWarehouseSavedQuery, DataWarehouseViewLink + from posthog.warehouse.models import ( + DataWarehouseTable, + DataWarehouseSavedQuery, + DataWarehouseViewLink, + ) team = Team.objects.get(pk=team_id) modifiers = 
create_default_modifiers_for_team(team, modifiers) @@ -164,29 +177,6 @@ def create_hogql_database(team_id: int, modifiers: Optional[HogQLQueryModifiers] return database -def determine_join_function(view): - def join_function(from_table: str, to_table: str, requested_fields: Dict[str, Any]): - from posthog.hogql import ast - from posthog.hogql.parser import parse_select - - if not requested_fields: - raise HogQLException(f"No fields requested from {to_table}") - - join_expr = ast.JoinExpr(table=parse_select(view.saved_query.query["query"])) - join_expr.join_type = "INNER JOIN" - join_expr.alias = to_table - join_expr.constraint = ast.JoinConstraint( - expr=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, - left=ast.Field(chain=[from_table, view.from_join_key]), - right=ast.Field(chain=[to_table, view.to_join_key]), - ) - ) - return join_expr - - return join_function - - class _SerializedFieldBase(TypedDict): key: str type: Literal[ diff --git a/posthog/hogql/database/models.py b/posthog/hogql/database/models.py index 9c7fcac1e8703..91a3b436a5df0 100644 --- a/posthog/hogql/database/models.py +++ b/posthog/hogql/database/models.py @@ -6,6 +6,7 @@ if TYPE_CHECKING: from posthog.hogql.context import HogQLContext + from posthog.hogql.ast import SelectQuery class FieldOrTable(BaseModel): @@ -101,7 +102,7 @@ def get_asterisk(self): class LazyJoin(FieldOrTable): model_config = ConfigDict(extra="forbid") - join_function: Callable[[str, str, Dict[str, Any], HogQLQueryModifiers], Any] + join_function: Callable[[str, str, Dict[str, Any], "HogQLContext", "SelectQuery"], Any] join_table: Table from_field: str diff --git a/posthog/hogql/database/schema/cohort_people.py b/posthog/hogql/database/schema/cohort_people.py index 097e74856f410..4cce926a61350 100644 --- a/posthog/hogql/database/schema/cohort_people.py +++ b/posthog/hogql/database/schema/cohort_people.py @@ -15,7 +15,11 @@ "person_id": StringDatabaseField(name="person_id"), "cohort_id": 
IntegerDatabaseField(name="cohort_id"), "team_id": IntegerDatabaseField(name="team_id"), - "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table), + "person": LazyJoin( + from_field="person_id", + join_table=PersonsTable(), + join_function=join_with_persons_table, + ), } @@ -25,7 +29,11 @@ def select_from_cohort_people_table(requested_fields: Dict[str, List[str]]): table_name = "raw_cohort_people" # must always include the person and cohort ids regardless of what other fields are requested - requested_fields = {"person_id": ["person_id"], "cohort_id": ["cohort_id"], **requested_fields} + requested_fields = { + "person_id": ["person_id"], + "cohort_id": ["cohort_id"], + **requested_fields, + } fields: List[ast.Expr] = [ast.Field(chain=[table_name] + chain) for name, chain in requested_fields.items()] return ast.SelectQuery( diff --git a/posthog/hogql/database/schema/event_sessions.py b/posthog/hogql/database/schema/event_sessions.py new file mode 100644 index 0000000000000..a6951478ada4a --- /dev/null +++ b/posthog/hogql/database/schema/event_sessions.py @@ -0,0 +1,178 @@ +from copy import deepcopy +from typing import Any, Dict, List, Optional +from posthog.hogql import ast +from posthog.hogql.context import HogQLContext +from posthog.hogql.database.models import ( + FieldOrTable, + IntegerDatabaseField, + StringDatabaseField, + VirtualTable, +) +from posthog.hogql.parser import parse_select +from posthog.hogql.resolver_utils import get_long_table_name, lookup_field_by_name +from posthog.hogql.visitor import CloningVisitor, TraversingVisitor + + +class EventsSessionSubTable(VirtualTable): + fields: Dict[str, FieldOrTable] = { + "$session_id": StringDatabaseField(name="$session_id"), + "session_duration": IntegerDatabaseField(name="session_duration"), + } + + def to_printed_clickhouse(self, context): + return "events" + + def to_printed_hogql(self): + return "events" + + +class 
GetFieldsTraverser(TraversingVisitor): + fields: List[ast.Field] + + def __init__(self, expr: ast.Expr): + super().__init__() + self.fields = [] + super().visit(expr) + + def visit_field(self, node: ast.Field): + self.fields.append(node) + + +class CleanTableNameFromChain(CloningVisitor): + def __init__(self, table_name: str, select_query_type: ast.SelectQueryType): + super().__init__() + self.table_name = table_name + self.select_query_type = select_query_type + + def visit_field(self, node: ast.Field): + if len(node.chain) > 1 and str(node.chain[0]) in self.select_query_type.tables: + type = self.select_query_type.tables[str(node.chain[0])] + + name = get_long_table_name(self.select_query_type, type) + if name == self.table_name: + node.chain.pop(0) + + return super().visit_field(node) + + +class WhereClauseExtractor: + compare_operators: List[ast.Expr] + + def __init__( + self, + where_expression: ast.Expr, + from_table_name: str, + select_query_type: ast.SelectQueryType, + ): + self.table_name = from_table_name + self.select_query_type = select_query_type + self.compare_operators = self.run(deepcopy(where_expression)) + + def _is_field_on_table(self, field: ast.Field) -> bool: + if len(field.chain) == 0: + return False + + type: Optional[ast.Type] = None + + # If the field contains at least two parts, the first might be a table. + if len(field.chain) > 1 and str(field.chain[0]) in self.select_query_type.tables: + type = self.select_query_type.tables[str(field.chain[0])] + + name = get_long_table_name(self.select_query_type, type) + if name != self.table_name: + return False + + # Field in scope + if not type: + type = lookup_field_by_name(self.select_query_type, str(field.chain[0])) + + if not type: + return False + + # Recursively resolve the rest of the chain until we can point to the deepest node. 
+ loop_type = type + chain_to_parse = field.chain[1:] + while True: + if isinstance(loop_type, ast.FieldTraverserType): + chain_to_parse = loop_type.chain + chain_to_parse + loop_type = loop_type.table_type + continue + if len(chain_to_parse) == 0: + break + next_chain = chain_to_parse.pop(0) + loop_type = loop_type.get_child(str(next_chain)) + if loop_type is None: + return False + + return True + + def run(self, expr: ast.Expr) -> List[ast.Expr]: + exprs_to_apply: List[ast.Expr] = [] + + if isinstance(expr, ast.And): + for expression in expr.exprs: + if not isinstance(expression, ast.CompareOperation): + continue + + fields = GetFieldsTraverser(expression).fields + res = [self._is_field_on_table(field) for field in fields] + if all(res): + exprs_to_apply.append(expression) + elif isinstance(expr, ast.CompareOperation): + exprs_to_apply.extend(self.run(ast.And(exprs=[expr]))) + elif isinstance(expr, ast.Or): + pass # Ignore for now + + # Clone field nodes and remove table name from field chains + return [ + CleanTableNameFromChain(self.table_name, self.select_query_type).visit( + CloningVisitor(clear_types=True, clear_locations=True).visit(e) + ) + for e in exprs_to_apply + ] + + +def join_with_events_table_session_duration( + from_table: str, + to_table: str, + requested_fields: Dict[str, Any], + context: HogQLContext, + node: ast.SelectQuery, +): + select_query = parse_select( + """ + select "$session_id", dateDiff('second', min(timestamp), max(timestamp)) as session_duration + from events + group by "$session_id" + """ + ) + + if isinstance(select_query, ast.SelectQuery): + compare_operators = ( + WhereClauseExtractor(node.where, from_table, node.type).compare_operators + if node.where and node.type + else [] + ) + select_query.where = ast.And( + exprs=[ + *compare_operators, + ast.CompareOperation( + left=ast.Field(chain=["$session_id"]), + op=ast.CompareOperationOp.NotEq, + right=ast.Constant(value=""), + ), + ] + ) + + join_expr = 
ast.JoinExpr(table=select_query) + join_expr.join_type = "INNER JOIN" + join_expr.alias = to_table + join_expr.constraint = ast.JoinConstraint( + expr=ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=ast.Field(chain=[from_table, "$session_id"]), + right=ast.Field(chain=[to_table, "$session_id"]), + ) + ) + + return join_expr diff --git a/posthog/hogql/database/schema/events.py b/posthog/hogql/database/schema/events.py index ba27ff7c5e158..e90142c290c72 100644 --- a/posthog/hogql/database/schema/events.py +++ b/posthog/hogql/database/schema/events.py @@ -11,12 +11,19 @@ FieldTraverser, FieldOrTable, ) +from posthog.hogql.database.schema.event_sessions import ( + EventsSessionSubTable, + join_with_events_table_session_duration, +) from posthog.hogql.database.schema.groups import GroupsTable, join_with_group_n_table from posthog.hogql.database.schema.person_distinct_ids import ( PersonDistinctIdsTable, join_with_person_distinct_ids_table, ) -from posthog.hogql.database.schema.person_overrides import PersonOverridesTable, join_with_person_overrides_table +from posthog.hogql.database.schema.person_overrides import ( + PersonOverridesTable, + join_with_person_overrides_table, +) class EventsPersonSubTable(VirtualTable): @@ -88,15 +95,40 @@ class EventsTable(Table): "person": FieldTraverser(chain=["pdi", "person"]), "person_id": FieldTraverser(chain=["pdi", "person_id"]), "$group_0": StringDatabaseField(name="$group_0"), - "group_0": LazyJoin(from_field="$group_0", join_table=GroupsTable(), join_function=join_with_group_n_table(0)), + "group_0": LazyJoin( + from_field="$group_0", + join_table=GroupsTable(), + join_function=join_with_group_n_table(0), + ), "$group_1": StringDatabaseField(name="$group_1"), - "group_1": LazyJoin(from_field="$group_1", join_table=GroupsTable(), join_function=join_with_group_n_table(1)), + "group_1": LazyJoin( + from_field="$group_1", + join_table=GroupsTable(), + join_function=join_with_group_n_table(1), + ), "$group_2": 
StringDatabaseField(name="$group_2"), - "group_2": LazyJoin(from_field="$group_2", join_table=GroupsTable(), join_function=join_with_group_n_table(2)), + "group_2": LazyJoin( + from_field="$group_2", + join_table=GroupsTable(), + join_function=join_with_group_n_table(2), + ), "$group_3": StringDatabaseField(name="$group_3"), - "group_3": LazyJoin(from_field="$group_3", join_table=GroupsTable(), join_function=join_with_group_n_table(3)), + "group_3": LazyJoin( + from_field="$group_3", + join_table=GroupsTable(), + join_function=join_with_group_n_table(3), + ), "$group_4": StringDatabaseField(name="$group_4"), - "group_4": LazyJoin(from_field="$group_4", join_table=GroupsTable(), join_function=join_with_group_n_table(4)), + "group_4": LazyJoin( + from_field="$group_4", + join_table=GroupsTable(), + join_function=join_with_group_n_table(4), + ), + "session": LazyJoin( + from_field="$session_id", + join_table=EventsSessionSubTable(), + join_function=join_with_events_table_session_duration, + ), } def to_printed_clickhouse(self, context): diff --git a/posthog/hogql/database/schema/groups.py b/posthog/hogql/database/schema/groups.py index 9b3fc1f28c176..39382b246349b 100644 --- a/posthog/hogql/database/schema/groups.py +++ b/posthog/hogql/database/schema/groups.py @@ -1,4 +1,6 @@ from typing import Any, Dict, List +from posthog.hogql.ast import SelectQuery +from posthog.hogql.context import HogQLContext from posthog.hogql.database.argmax import argmax_select from posthog.hogql.database.models import ( @@ -34,7 +36,11 @@ def select_from_groups_table(requested_fields: Dict[str, List[str]]): def join_with_group_n_table(group_index: int): def join_with_group_table( - from_table: str, to_table: str, requested_fields: Dict[str, Any], modifiers: HogQLQueryModifiers + from_table: str, + to_table: str, + requested_fields: Dict[str, Any], + context: HogQLContext, + node: SelectQuery, ): from posthog.hogql import ast @@ -43,7 +49,9 @@ def join_with_group_table( select_query = 
select_from_groups_table(requested_fields) select_query.where = ast.CompareOperation( - left=ast.Field(chain=["index"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value=group_index) + left=ast.Field(chain=["index"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value=group_index), ) join_expr = ast.JoinExpr(table=select_query) diff --git a/posthog/hogql/database/schema/person_distinct_ids.py b/posthog/hogql/database/schema/person_distinct_ids.py index 3765c44673890..d5785bef98c49 100644 --- a/posthog/hogql/database/schema/person_distinct_ids.py +++ b/posthog/hogql/database/schema/person_distinct_ids.py @@ -1,4 +1,6 @@ from typing import Dict, List +from posthog.hogql.ast import SelectQuery +from posthog.hogql.context import HogQLContext from posthog.hogql.database.argmax import argmax_select from posthog.hogql.database.models import ( @@ -18,7 +20,11 @@ "team_id": IntegerDatabaseField(name="team_id"), "distinct_id": StringDatabaseField(name="distinct_id"), "person_id": StringDatabaseField(name="person_id"), - "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table), + "person": LazyJoin( + from_field="person_id", + join_table=PersonsTable(), + join_function=join_with_persons_table, + ), } @@ -36,7 +42,11 @@ def select_from_person_distinct_ids_table(requested_fields: Dict[str, List[str]] def join_with_person_distinct_ids_table( - from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers + from_table: str, + to_table: str, + requested_fields: Dict[str, List[str]], + context: HogQLContext, + node: SelectQuery, ): from posthog.hogql import ast diff --git a/posthog/hogql/database/schema/person_overrides.py b/posthog/hogql/database/schema/person_overrides.py index 9e2e92656867c..800e902d343fe 100644 --- a/posthog/hogql/database/schema/person_overrides.py +++ b/posthog/hogql/database/schema/person_overrides.py @@ -1,4 +1,6 @@ from typing import Any, Dict, List 
+from posthog.hogql.ast import SelectQuery +from posthog.hogql.context import HogQLContext from posthog.hogql.database.argmax import argmax_select from posthog.hogql.database.models import ( @@ -32,7 +34,11 @@ def select_from_person_overrides_table(requested_fields: Dict[str, List[str]]): def join_with_person_overrides_table( - from_table: str, to_table: str, requested_fields: Dict[str, Any], modifiers: HogQLQueryModifiers + from_table: str, + to_table: str, + requested_fields: Dict[str, Any], + context: HogQLContext, + node: SelectQuery, ): from posthog.hogql import ast diff --git a/posthog/hogql/database/schema/persons.py b/posthog/hogql/database/schema/persons.py index 6df5513f316cf..1a1d79123436d 100644 --- a/posthog/hogql/database/schema/persons.py +++ b/posthog/hogql/database/schema/persons.py @@ -1,6 +1,8 @@ from typing import Dict, List +from posthog.hogql.ast import SelectQuery from posthog.hogql.constants import HogQLQuerySettings +from posthog.hogql.context import HogQLContext from posthog.hogql.database.argmax import argmax_select from posthog.hogql.database.models import ( Table, @@ -81,13 +83,17 @@ def select_from_persons_table(requested_fields: Dict[str, List[str]], modifiers: def join_with_persons_table( - from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers + from_table: str, + to_table: str, + requested_fields: Dict[str, List[str]], + context: HogQLContext, + node: SelectQuery, ): from posthog.hogql import ast if not requested_fields: raise HogQLException("No fields requested from persons table") - join_expr = ast.JoinExpr(table=select_from_persons_table(requested_fields, modifiers)) + join_expr = ast.JoinExpr(table=select_from_persons_table(requested_fields, context.modifiers)) join_expr.join_type = "INNER JOIN" join_expr.alias = to_table join_expr.constraint = ast.JoinConstraint( diff --git a/posthog/hogql/database/schema/persons_pdi.py b/posthog/hogql/database/schema/persons_pdi.py index 
8f83234b6bed3..9c7fcf9e03e43 100644 --- a/posthog/hogql/database/schema/persons_pdi.py +++ b/posthog/hogql/database/schema/persons_pdi.py @@ -1,4 +1,6 @@ from typing import Dict, List +from posthog.hogql.ast import SelectQuery +from posthog.hogql.context import HogQLContext from posthog.hogql.database.argmax import argmax_select from posthog.hogql.database.models import ( @@ -29,7 +31,11 @@ def persons_pdi_select(requested_fields: Dict[str, List[str]]): # :NOTE: We already have person_distinct_ids.py, which most tables link to. This persons_pdi.py is a hack to # make "select persons.pdi.distinct_id from persons" work while avoiding circular imports. Don't use directly. def persons_pdi_join( - from_table: str, to_table: str, requested_fields: Dict[str, List[str]], modifiers: HogQLQueryModifiers + from_table: str, + to_table: str, + requested_fields: Dict[str, List[str]], + context: HogQLContext, + node: SelectQuery, ): from posthog.hogql import ast diff --git a/posthog/hogql/database/schema/static_cohort_people.py b/posthog/hogql/database/schema/static_cohort_people.py index d09a7479f080d..c9737f86c6af9 100644 --- a/posthog/hogql/database/schema/static_cohort_people.py +++ b/posthog/hogql/database/schema/static_cohort_people.py @@ -1,6 +1,12 @@ from typing import Dict -from posthog.hogql.database.models import Table, StringDatabaseField, IntegerDatabaseField, LazyJoin, FieldOrTable +from posthog.hogql.database.models import ( + Table, + StringDatabaseField, + IntegerDatabaseField, + LazyJoin, + FieldOrTable, +) from posthog.hogql.database.schema.persons import PersonsTable, join_with_persons_table @@ -9,7 +15,11 @@ class StaticCohortPeople(Table): "person_id": StringDatabaseField(name="person_id"), "cohort_id": IntegerDatabaseField(name="cohort_id"), "team_id": IntegerDatabaseField(name="team_id"), - "person": LazyJoin(from_field="person_id", join_table=PersonsTable(), join_function=join_with_persons_table), + "person": LazyJoin( + from_field="person_id", + 
join_table=PersonsTable(), + join_function=join_with_persons_table, + ), } def avoid_asterisk_fields(self): diff --git a/posthog/hogql/database/schema/test/test_event_sessions.py b/posthog/hogql/database/schema/test/test_event_sessions.py new file mode 100644 index 0000000000000..268180a773e6c --- /dev/null +++ b/posthog/hogql/database/schema/test/test_event_sessions.py @@ -0,0 +1,220 @@ +from typing import List, cast +from posthog.hogql import ast +from posthog.hogql.context import HogQLContext +from posthog.hogql.database.database import create_hogql_database +from posthog.hogql.database.schema.event_sessions import ( + CleanTableNameFromChain, + WhereClauseExtractor, +) +from posthog.hogql.parser import parse_expr, parse_select +from posthog.hogql.resolver import resolve_types +from posthog.hogql.visitor import clone_expr +from posthog.test.base import BaseTest + + +class TestWhereClauseExtractor(BaseTest): + def setUp(self): + self.database = create_hogql_database(self.team.pk) + self.context = HogQLContext(database=self.database, team_id=self.team.pk) + + def _select(self, query: str) -> ast.SelectQuery: + select_query = cast(ast.SelectQuery, clone_expr(parse_select(query), clear_locations=True)) + return cast(ast.SelectQuery, resolve_types(select_query, self.context)) + + def _compare_operators(self, query: ast.SelectQuery, table_name: str) -> List[ast.Expr]: + assert query.where is not None and query.type is not None + return WhereClauseExtractor(query.where, table_name, query.type).compare_operators + + def test_with_simple_equality_clause(self): + query = self._select( + """ + SELECT event + FROM events + WHERE event = '$pageview' + """ + ) + + compare_operators = self._compare_operators(query, "events") + + assert len(compare_operators) == 1 + assert compare_operators[0] == ast.CompareOperation( + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="$pageview"), + ) + + def test_with_timestamps(self): + query = 
self._select( + """ + SELECT event + FROM events + WHERE timestamp > '2023-01-01' + """ + ) + + compare_operators = self._compare_operators(query, "events") + + assert len(compare_operators) == 1 + assert compare_operators[0] == ast.CompareOperation( + left=ast.Field(chain=["timestamp"]), + op=ast.CompareOperationOp.Gt, + right=ast.Constant(value="2023-01-01"), + ) + + def test_with_alias_table(self): + query = self._select( + """ + SELECT e.event + FROM events e + WHERE e.event = '$pageview' + """ + ) + + compare_operators = self._compare_operators(query, "e") + + assert len(compare_operators) == 1 + assert compare_operators[0] == ast.CompareOperation( + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="$pageview"), + ) + + def test_with_multiple_clauses(self): + query = self._select( + """ + SELECT event + FROM events + WHERE event = '$pageview' AND timestamp > '2023-01-01' + """ + ) + + compare_operators = self._compare_operators(query, "events") + + assert len(compare_operators) == 2 + assert compare_operators[0] == ast.CompareOperation( + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="$pageview"), + ) + assert compare_operators[1] == ast.CompareOperation( + left=ast.Field(chain=["timestamp"]), + op=ast.CompareOperationOp.Gt, + right=ast.Constant(value="2023-01-01"), + ) + + def test_with_join(self): + query = self._select( + """ + SELECT e.event, p.id + FROM events e + LEFT JOIN persons p + ON e.person_id = p.id + WHERE e.event = '$pageview' and p.is_identified = 0 + """ + ) + + compare_operators = self._compare_operators(query, "e") + + assert len(compare_operators) == 1 + assert compare_operators[0] == ast.CompareOperation( + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="$pageview"), + ) + + def test_with_ignoring_ors(self): + query = self._select( + """ + SELECT event + FROM events + WHERE event = '$pageleave' OR event = 
'$pageview' + """ + ) + + compare_operators = self._compare_operators(query, "events") + + assert len(compare_operators) == 0 + + +class TestCleanTableNameFromChain(BaseTest): + def setUp(self): + self.database = create_hogql_database(self.team.pk) + self.context = HogQLContext(database=self.database, team_id=self.team.pk) + + def _select(self, query: str) -> ast.SelectQuery: + select_query = cast(ast.SelectQuery, clone_expr(parse_select(query), clear_locations=True)) + return cast(ast.SelectQuery, resolve_types(select_query, self.context)) + + def _clean(self, table_name: str, query: ast.SelectQuery, expr: ast.Expr) -> ast.Expr: + assert query.type is not None + return CleanTableNameFromChain(table_name, query.type).visit(expr) + + def test_table_with_no_alias(self): + query = self._select( + """ + SELECT event + FROM events + """ + ) + + expr = parse_expr('event = "$pageview"') + cleaned_expr = cast(ast.CompareOperation, self._clean("events", query, expr)) + expr_left = cast(ast.Field, cleaned_expr.left) + + assert expr_left.chain == ["event"] + + def test_table_with_alias(self): + query = self._select( + """ + SELECT e.event + FROM events e + """ + ) + + expr = parse_expr('e.event = "$pageview"') + cleaned_expr = cast(ast.CompareOperation, self._clean("e", query, expr)) + expr_left = cast(ast.Field, cleaned_expr.left) + + assert expr_left.chain == ["event"] + + def test_field_with_properties(self): + query = self._select( + """ + SELECT event + FROM events + """ + ) + + expr = parse_expr('properties.$browser = "Chrome"') + cleaned_expr = cast(ast.CompareOperation, self._clean("events", query, expr)) + expr_left = cast(ast.Field, cleaned_expr.left) + + assert expr_left.chain == ["properties", "$browser"] + + def test_table_alias_and_field_with_properties(self): + query = self._select( + """ + SELECT e.event + FROM events e + """ + ) + + expr = parse_expr('e.properties.$browser = "Chrome"') + cleaned_expr = cast(ast.CompareOperation, self._clean("e", query, expr)) 
+ expr_left = cast(ast.Field, cleaned_expr.left) + + assert expr_left.chain == ["properties", "$browser"] + + def test_with_incorrect_alias(self): + query = self._select( + """ + SELECT e.event + FROM events e + """ + ) + + expr = parse_expr('e.event = "$pageview"') + cleaned_expr = cast(ast.CompareOperation, self._clean("some_other_alias", query, expr)) + expr_left = cast(ast.Field, cleaned_expr.left) + + assert expr_left.chain == ["e", "event"] diff --git a/posthog/hogql/database/test/__snapshots__/test_database.ambr b/posthog/hogql/database/test/__snapshots__/test_database.ambr index 1a0efafd1a4c6..90bc08c457891 100644 --- a/posthog/hogql/database/test/__snapshots__/test_database.ambr +++ b/posthog/hogql/database/test/__snapshots__/test_database.ambr @@ -226,6 +226,15 @@ "updated_at", "properties" ] + }, + { + "key": "session", + "type": "lazy_table", + "table": "events", + "fields": [ + "$session_id", + "session_duration" + ] } ], "groups": [ @@ -1007,6 +1016,15 @@ "updated_at", "properties" ] + }, + { + "key": "session", + "type": "lazy_table", + "table": "events", + "fields": [ + "$session_id", + "session_duration" + ] } ], "groups": [ diff --git a/posthog/hogql/database/test/tables.py b/posthog/hogql/database/test/tables.py index f675f3c8d194d..f3328091791b7 100644 --- a/posthog/hogql/database/test/tables.py +++ b/posthog/hogql/database/test/tables.py @@ -1,4 +1,8 @@ -from posthog.hogql.database.models import DateDatabaseField, IntegerDatabaseField, FloatDatabaseField +from posthog.hogql.database.models import ( + DateDatabaseField, + IntegerDatabaseField, + FloatDatabaseField, +) from posthog.hogql.database.s3_table import S3Table from posthog.hogql.database.models import SavedQuery diff --git a/posthog/hogql/database/test/test_argmax.py b/posthog/hogql/database/test/test_argmax.py index 535c17cae97ed..8c61ecd4a29c4 100644 --- a/posthog/hogql/database/test/test_argmax.py +++ b/posthog/hogql/database/test/test_argmax.py @@ -58,7 +58,10 @@ def 
test_argmax_select_deleted(self): op=ast.CompareOperationOp.Eq, left=ast.Call( name="argMax", - args=[ast.Field(chain=["raw_persons", "is_deleted"]), ast.Field(chain=["raw_persons", "version"])], + args=[ + ast.Field(chain=["raw_persons", "is_deleted"]), + ast.Field(chain=["raw_persons", "version"]), + ], ), right=ast.Constant(value=0), ), diff --git a/posthog/hogql/database/test/test_database.py b/posthog/hogql/database/test/test_database.py index 1ea0583c4e349..16bfeb9e4a392 100644 --- a/posthog/hogql/database/test/test_database.py +++ b/posthog/hogql/database/test/test_database.py @@ -51,7 +51,11 @@ def test_database_with_warehouse_tables(self, patch_execute): team=self.team, access_key="_accesskey", access_secret="_secret" ) DataWarehouseTable.objects.create( - name="whatever", team=self.team, columns={"id": "String"}, credential=credential, url_pattern="" + name="whatever", + team=self.team, + columns={"id": "String"}, + credential=credential, + url_pattern="", ) create_hogql_database(team_id=self.team.pk) diff --git a/posthog/hogql/database/test/test_s3_table.py b/posthog/hogql/database/test/test_s3_table.py index 1711aebb688a6..72b5dfa6cf3c0 100644 --- a/posthog/hogql/database/test/test_s3_table.py +++ b/posthog/hogql/database/test/test_s3_table.py @@ -27,7 +27,10 @@ def test_s3_table_select(self): self._init_database() hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql") - self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10") + self.assertEqual( + hogql, + "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10", + ) clickhouse = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="clickhouse") @@ -80,7 +83,8 @@ def test_s3_table_select_join_with_alias(self): dialect="hogql", ) self.assertEqual( - hogql, "SELECT a.High, a.Low FROM aapl_stock AS a JOIN aapl_stock AS b ON equals(a.High, b.High) LIMIT 10" + hogql, + "SELECT a.High, a.Low FROM 
aapl_stock AS a JOIN aapl_stock AS b ON equals(a.High, b.High) LIMIT 10", ) clickhouse = self._select( @@ -180,7 +184,8 @@ def test_s3_table_select_in(self): self._init_database() hogql = self._select( - query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", dialect="hogql" + query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", + dialect="hogql", ) self.assertEqual( hogql, @@ -188,7 +193,8 @@ def test_s3_table_select_in(self): ) clickhouse = self._select( - query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", dialect="clickhouse" + query="SELECT uuid, event FROM events WHERE event IN (SELECT Date FROM aapl_stock)", + dialect="clickhouse", ) self.assertEqual( diff --git a/posthog/hogql/database/test/test_saved_query.py b/posthog/hogql/database/test/test_saved_query.py index 5e64f9760fcbf..7c7f534c66f21 100644 --- a/posthog/hogql/database/test/test_saved_query.py +++ b/posthog/hogql/database/test/test_saved_query.py @@ -35,7 +35,10 @@ def test_saved_query_table_select(self): self._init_database() hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql") - self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10") + self.assertEqual( + hogql, + "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10", + ) clickhouse = self._select(query="SELECT * FROM aapl_stock_view LIMIT 10", dialect="clickhouse") @@ -48,9 +51,15 @@ def test_saved_query_with_alias(self): self._init_database() hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql") - self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10") + self.assertEqual( + hogql, + "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10", + ) - clickhouse = self._select(query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", dialect="clickhouse") 
+ clickhouse = self._select( + query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", + dialect="clickhouse", + ) self.assertEqual( clickhouse, diff --git a/posthog/hogql/database/test/test_view.py b/posthog/hogql/database/test/test_view.py index 3d773314e1f8f..26ce89e10653c 100644 --- a/posthog/hogql/database/test/test_view.py +++ b/posthog/hogql/database/test/test_view.py @@ -35,7 +35,10 @@ def test_view_table_select(self): self._init_database() hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql") - self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10") + self.assertEqual( + hogql, + "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10", + ) clickhouse = self._select(query="SELECT * FROM aapl_stock_view LIMIT 10", dialect="clickhouse") @@ -48,9 +51,15 @@ def test_view_with_alias(self): self._init_database() hogql = self._select(query="SELECT * FROM aapl_stock LIMIT 10", dialect="hogql") - self.assertEqual(hogql, "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10") + self.assertEqual( + hogql, + "SELECT Date, Open, High, Low, Close, Volume, OpenInt FROM aapl_stock LIMIT 10", + ) - clickhouse = self._select(query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", dialect="clickhouse") + clickhouse = self._select( + query="SELECT * FROM aapl_stock_view AS some_alias LIMIT 10", + dialect="clickhouse", + ) self.assertEqual( clickhouse, diff --git a/posthog/hogql/errors.py b/posthog/hogql/errors.py index 5dd36c2bf7143..4035e30eed173 100644 --- a/posthog/hogql/errors.py +++ b/posthog/hogql/errors.py @@ -11,7 +11,12 @@ class HogQLException(Exception): end: Optional[int] def __init__( - self, message: str, *, start: Optional[int] = None, end: Optional[int] = None, node: Optional["Expr"] = None + self, + message: str, + *, + start: Optional[int] = None, + end: Optional[int] = None, + node: Optional["Expr"] = None, ): 
super().__init__(message) if node is not None and node.start is not None and node.end is not None: diff --git a/posthog/hogql/escape_sql.py b/posthog/hogql/escape_sql.py index 68e326ede1437..d6c9b4bfefd99 100644 --- a/posthog/hogql/escape_sql.py +++ b/posthog/hogql/escape_sql.py @@ -54,19 +54,25 @@ def escape_clickhouse_identifier(identifier: str) -> str: def escape_hogql_string( - name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, timezone: Optional[str] = None + name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, + timezone: Optional[str] = None, ) -> str: return SQLValueEscaper(timezone=timezone, dialect="hogql").visit(name) def escape_clickhouse_string( - name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, timezone: Optional[str] = None + name: float | int | str | list | tuple | date | datetime | UUID | UUIDT, + timezone: Optional[str] = None, ) -> str: return SQLValueEscaper(timezone=timezone, dialect="clickhouse").visit(name) class SQLValueEscaper: - def __init__(self, timezone: Optional[str] = None, dialect: Literal["hogql", "clickhouse"] = "clickhouse"): + def __init__( + self, + timezone: Optional[str] = None, + dialect: Literal["hogql", "clickhouse"] = "clickhouse", + ): self._timezone = timezone or "UTC" self._dialect = dialect diff --git a/posthog/hogql/filters.py b/posthog/hogql/filters.py index 61f992ac86688..c900ac1bc5ea6 100644 --- a/posthog/hogql/filters.py +++ b/posthog/hogql/filters.py @@ -59,7 +59,12 @@ def visit_placeholder(self, node): parsed_date = isoparse(dateTo) except ValueError: parsed_date = relative_date_parse(dateTo, self.team.timezone_info) - exprs.append(parse_expr("timestamp < {timestamp}", {"timestamp": ast.Constant(value=parsed_date)})) + exprs.append( + parse_expr( + "timestamp < {timestamp}", + {"timestamp": ast.Constant(value=parsed_date)}, + ) + ) # limit to the last 30d by default dateFrom = self.filters.dateRange.date_from if self.filters.dateRange else None @@ 
-68,7 +73,12 @@ def visit_placeholder(self, node): parsed_date = isoparse(dateFrom) except ValueError: parsed_date = relative_date_parse(dateFrom, self.team.timezone_info) - exprs.append(parse_expr("timestamp >= {timestamp}", {"timestamp": ast.Constant(value=parsed_date)})) + exprs.append( + parse_expr( + "timestamp >= {timestamp}", + {"timestamp": ast.Constant(value=parsed_date)}, + ) + ) if len(exprs) == 0: return ast.Constant(value=True) diff --git a/posthog/hogql/functions/mapping.py b/posthog/hogql/functions/mapping.py index 0e4ba1623b28a..8d8fca037f21a 100644 --- a/posthog/hogql/functions/mapping.py +++ b/posthog/hogql/functions/mapping.py @@ -144,7 +144,11 @@ class HogQLFunctionMeta: "toFloat": HogQLFunctionMeta("toFloat64OrNull", 1, 1), "toDecimal": HogQLFunctionMeta("toDecimal64OrNull", 1, 1), "toDate": HogQLFunctionMeta( - "toDateOrNull", 1, 1, overloads=[((ast.DateTimeType, ast.DateType), "toDate")], tz_aware=True + "toDateOrNull", + 1, + 1, + overloads=[((ast.DateTimeType, ast.DateType), "toDate")], + tz_aware=True, ), "toDateTime": HogQLFunctionMeta( "parseDateTime64BestEffortOrNull", @@ -731,6 +735,17 @@ class HogQLFunctionMeta: # TODO: Make the below details part of function meta # Functions where we use a -OrNull variant by default -ADD_OR_NULL_DATETIME_FUNCTIONS = ("toDateTime", "parseDateTime", "parseDateTimeBestEffort") +ADD_OR_NULL_DATETIME_FUNCTIONS = ( + "toDateTime", + "parseDateTime", + "parseDateTimeBestEffort", +) # Functions where the first argument needs to be DateTime and not DateTime64 -FIRST_ARG_DATETIME_FUNCTIONS = ("tumble", "tumbleStart", "tumbleEnd", "hop", "hopStart", "hopEnd") +FIRST_ARG_DATETIME_FUNCTIONS = ( + "tumble", + "tumbleStart", + "tumbleEnd", + "hop", + "hopStart", + "hopEnd", +) diff --git a/posthog/hogql/functions/test/test_cohort.py b/posthog/hogql/functions/test/test_cohort.py index c9adaffbba8a0..f893eea1e5e68 100644 --- a/posthog/hogql/functions/test/test_cohort.py +++ 
b/posthog/hogql/functions/test/test_cohort.py @@ -8,7 +8,12 @@ from posthog.models.cohort.util import recalculate_cohortpeople from posthog.models.utils import UUIDT from posthog.schema import HogQLQueryModifiers -from posthog.test.base import BaseTest, _create_person, _create_event, flush_persons_and_events +from posthog.test.base import ( + BaseTest, + _create_person, + _create_event, + flush_persons_and_events, +) elements_chain_match = lambda x: parse_expr("match(elements_chain, {regex})", {"regex": ast.Constant(value=str(x))}) not_call = lambda x: ast.Call(name="not", args=[x]) @@ -33,7 +38,8 @@ def _create_random_events(self) -> str: def test_in_cohort_dynamic(self): random_uuid = self._create_random_events() cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) recalculate_cohortpeople(cohort, pending_version=0) response = execute_hogql_query( @@ -100,5 +106,8 @@ def test_in_cohort_error(self): self.assertEqual(str(e.exception), "cohort() takes exactly one string or integer argument") with self.assertRaises(HogQLException) as e: - execute_hogql_query(f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", self.team) + execute_hogql_query( + f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", + self.team, + ) self.assertEqual(str(e.exception), "Could not find a cohort with the name 'blabla'") diff --git a/posthog/hogql/functions/test/test_sparkline.py b/posthog/hogql/functions/test/test_sparkline.py index febffcf2b8948..2a5c24d90b1af 100644 --- a/posthog/hogql/functions/test/test_sparkline.py +++ b/posthog/hogql/functions/test/test_sparkline.py @@ -14,7 +14,10 @@ def test_sparkline(self): response.hogql, f"SELECT tuple('__hogql_chart_type', 'sparkline', 'results', [1, 2, 3]) LIMIT 100", ) - self.assertEqual(response.results[0][0], ("__hogql_chart_type", "sparkline", 
"results", [1, 2, 3])) + self.assertEqual( + response.results[0][0], + ("__hogql_chart_type", "sparkline", "results", [1, 2, 3]), + ) def test_sparkline_error(self): with self.assertRaises(HogQLException) as e: diff --git a/posthog/hogql/hogql.py b/posthog/hogql/hogql.py index 87a2e0ee8f47e..6410bdc6a7d46 100644 --- a/posthog/hogql/hogql.py +++ b/posthog/hogql/hogql.py @@ -3,7 +3,11 @@ from posthog.hogql import ast from posthog.hogql.context import HogQLContext from posthog.hogql.database.database import create_hogql_database -from posthog.hogql.errors import HogQLException, NotImplementedException, SyntaxException +from posthog.hogql.errors import ( + HogQLException, + NotImplementedException, + SyntaxException, +) from posthog.hogql.parser import parse_expr from posthog.hogql.printer import prepare_ast_for_printing, print_prepared_ast @@ -38,7 +42,10 @@ def translate_hogql( prepare_ast_for_printing(select_query, context=context, dialect=dialect, stack=[select_query]), ) return print_prepared_ast( - prepared_select_query.select[0], context=context, dialect=dialect, stack=[prepared_select_query] + prepared_select_query.select[0], + context=context, + dialect=dialect, + stack=[prepared_select_query], ) except (NotImplementedException, SyntaxException): raise diff --git a/posthog/hogql/metadata.py b/posthog/hogql/metadata.py index de044ed2c4743..29a9b11075ab0 100644 --- a/posthog/hogql/metadata.py +++ b/posthog/hogql/metadata.py @@ -31,7 +31,9 @@ def get_hogql_metadata( translate_hogql(query.expr, context=context, table=query.table or "events") elif isinstance(query.select, str): context = HogQLContext( - team_id=team.pk, modifiers=create_default_modifiers_for_team(team), enable_select_queries=True + team_id=team.pk, + modifiers=create_default_modifiers_for_team(team), + enable_select_queries=True, ) select_ast = parse_select(query.select) diff --git a/posthog/hogql/parser.py b/posthog/hogql/parser.py index cacae5eefec95..d1d8ef2a1b7a7 100644 --- 
a/posthog/hogql/parser.py +++ b/posthog/hogql/parser.py @@ -7,7 +7,11 @@ from posthog.hogql import ast from posthog.hogql.base import AST from posthog.hogql.constants import RESERVED_KEYWORDS -from posthog.hogql.errors import NotImplementedException, HogQLException, SyntaxException +from posthog.hogql.errors import ( + NotImplementedException, + HogQLException, + SyntaxException, +) from posthog.hogql.grammar.HogQLLexer import HogQLLexer from posthog.hogql.grammar.HogQLParser import HogQLParser from posthog.hogql.parse_string import parse_string, parse_string_literal @@ -211,7 +215,11 @@ def visitSelectStmt(self, ctx: HogQLParser.SelectStmtContext): select_query.array_join_list = self.visit(array_join_clause.columnExprList()) for expr in select_query.array_join_list: if not isinstance(expr, ast.Alias): - raise SyntaxException("ARRAY JOIN arrays must have an alias", start=expr.start, end=expr.end) + raise SyntaxException( + "ARRAY JOIN arrays must have an alias", + start=expr.start, + end=expr.end, + ) if ctx.topClause(): raise NotImplementedException(f"Unsupported: SelectStmt.topClause()") @@ -382,7 +390,8 @@ def visitRatioExpr(self, ctx: HogQLParser.RatioExprContext): right = number_literals[1] if ctx.SLASH() and len(number_literals) > 1 else None return ast.RatioExpr( - left=self.visitNumberLiteral(left), right=self.visitNumberLiteral(right) if right else None + left=self.visitNumberLiteral(left), + right=self.visitNumberLiteral(right) if right else None, ) def visitSettingExprList(self, ctx: HogQLParser.SettingExprListContext): @@ -455,7 +464,11 @@ def visitColumnExprList(self, ctx: HogQLParser.ColumnExprListContext): def visitColumnExprTernaryOp(self, ctx: HogQLParser.ColumnExprTernaryOpContext): return ast.Call( name="if", - args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1)), self.visit(ctx.columnExpr(2))], + args=[ + self.visit(ctx.columnExpr(0)), + self.visit(ctx.columnExpr(1)), + self.visit(ctx.columnExpr(2)), + ], ) def 
visitColumnExprAlias(self, ctx: HogQLParser.ColumnExprAliasContext): @@ -480,7 +493,9 @@ def visitColumnExprExtract(self, ctx: HogQLParser.ColumnExprExtractContext): def visitColumnExprNegate(self, ctx: HogQLParser.ColumnExprNegateContext): return ast.ArithmeticOperation( - op=ast.ArithmeticOperationOp.Sub, left=ast.Constant(value=0), right=self.visit(ctx.columnExpr()) + op=ast.ArithmeticOperationOp.Sub, + left=ast.Constant(value=0), + right=self.visit(ctx.columnExpr()), ) def visitColumnExprSubquery(self, ctx: HogQLParser.ColumnExprSubqueryContext): @@ -737,7 +752,8 @@ def visitColumnArgExpr(self, ctx: HogQLParser.ColumnArgExprContext): def visitColumnLambdaExpr(self, ctx: HogQLParser.ColumnLambdaExprContext): return ast.Lambda( - args=[self.visit(identifier) for identifier in ctx.identifier()], expr=self.visit(ctx.columnExpr()) + args=[self.visit(identifier) for identifier in ctx.identifier()], + expr=self.visit(ctx.columnExpr()), ) def visitWithExprList(self, ctx: HogQLParser.WithExprListContext): @@ -863,4 +879,7 @@ def visitEnumValue(self, ctx: HogQLParser.EnumValueContext): raise NotImplementedException(f"Unsupported node: EnumValue") def visitColumnExprNullish(self, ctx: HogQLParser.ColumnExprNullishContext): - return ast.Call(name="ifNull", args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1))]) + return ast.Call( + name="ifNull", + args=[self.visit(ctx.columnExpr(0)), self.visit(ctx.columnExpr(1))], + ) diff --git a/posthog/hogql/printer.py b/posthog/hogql/printer.py index 21cf3ef978060..fa55d34e586a8 100644 --- a/posthog/hogql/printer.py +++ b/posthog/hogql/printer.py @@ -30,7 +30,8 @@ escape_hogql_string, ) from posthog.hogql.functions.mapping import ALL_EXPOSED_FUNCTION_NAMES, validate_function_args -from posthog.hogql.resolver import ResolverException, lookup_field_by_name, resolve_types +from posthog.hogql.resolver import ResolverException, resolve_types +from posthog.hogql.resolver_utils import lookup_field_by_name from 
posthog.hogql.transforms.in_cohort import resolve_in_cohorts from posthog.hogql.transforms.lazy_tables import resolve_lazy_tables from posthog.hogql.transforms.property_types import resolve_property_types @@ -74,7 +75,12 @@ def print_ast( ) -> str: prepared_ast = prepare_ast_for_printing(node=node, context=context, dialect=dialect, stack=stack, settings=settings) return print_prepared_ast( - node=prepared_ast, context=context, dialect=dialect, stack=stack, settings=settings, pretty=pretty + node=prepared_ast, + context=context, + dialect=dialect, + stack=stack, + settings=settings, + pretty=pretty, ) @@ -121,9 +127,13 @@ def print_prepared_ast( ) -> str: with context.timings.measure("printer"): # _Printer also adds a team_id guard if printing clickhouse - return _Printer(context=context, dialect=dialect, stack=stack or [], settings=settings, pretty=pretty).visit( - node - ) + return _Printer( + context=context, + dialect=dialect, + stack=stack or [], + settings=settings, + pretty=pretty, + ).visit(node) @dataclass @@ -238,7 +248,11 @@ def visit_select_query(self, node: ast.SelectQuery): array_join = "" if node.array_join_op is not None: - if node.array_join_op not in ("ARRAY JOIN", "LEFT ARRAY JOIN", "INNER ARRAY JOIN"): + if node.array_join_op not in ( + "ARRAY JOIN", + "LEFT ARRAY JOIN", + "INNER ARRAY JOIN", + ): raise HogQLException(f"Invalid ARRAY JOIN operation: {node.array_join_op}") array_join = node.array_join_op if len(node.array_join_list) == 0: @@ -266,7 +280,10 @@ def visit_select_query(self, node: ast.SelectQuery): if isinstance(limit, ast.Constant) and isinstance(limit.value, int): limit.value = min(limit.value, MAX_SELECT_RETURNED_ROWS) else: - limit = ast.Call(name="min2", args=[ast.Constant(value=MAX_SELECT_RETURNED_ROWS), limit]) + limit = ast.Call( + name="min2", + args=[ast.Constant(value=MAX_SELECT_RETURNED_ROWS), limit], + ) else: limit = ast.Constant(value=MAX_SELECT_RETURNED_ROWS) @@ -642,7 +659,11 @@ def visit_call(self, node: ast.Call): 
func_meta = HOGQL_AGGREGATIONS[node.name] validate_function_args( - node.args, func_meta.min_args, func_meta.max_args, node.name, function_term="aggregation" + node.args, + func_meta.min_args, + func_meta.max_args, + node.name, + function_term="aggregation", ) if func_meta.min_params: if node.params is None: @@ -678,7 +699,11 @@ def visit_call(self, node: ast.Call): if node.params is None: raise HogQLException(f"Function '{node.name}' requires parameters in addition to arguments") validate_function_args( - node.params, func_meta.min_params, func_meta.max_params, node.name, argument_term="parameter" + node.params, + func_meta.min_params, + func_meta.max_params, + node.name, + argument_term="parameter", ) if self.dialect == "clickhouse": @@ -724,7 +749,10 @@ def visit_call(self, node: ast.Call): ) if first_arg_constant_type is not None: - for overload_types, overload_clickhouse_name in func_meta.overloads: + for ( + overload_types, + overload_clickhouse_name, + ) in func_meta.overloads: if isinstance(first_arg_constant_type, overload_types): relevant_clickhouse_name = overload_clickhouse_name break # Found an overload matching the first function org @@ -801,7 +829,8 @@ def visit_field_type(self, type: ast.FieldType): return self.visit( ast.AsteriskType( table_type=ast.TableAliasType( - table_type=ast.TableType(table=resolved_field), alias=type.table_type.alias + table_type=ast.TableType(table=resolved_field), + alias=type.table_type.alias, ) ) ) diff --git a/posthog/hogql/property.py b/posthog/hogql/property.py index c0341461e1293..9d619c23175b6 100644 --- a/posthog/hogql/property.py +++ b/posthog/hogql/property.py @@ -10,12 +10,24 @@ from posthog.hogql.errors import NotImplementedException from posthog.hogql.parser import parse_expr from posthog.hogql.visitor import TraversingVisitor, clone_expr -from posthog.models import Action, ActionStep, Cohort, Property, Team, PropertyDefinition +from posthog.models import ( + Action, + ActionStep, + Cohort, + Property, + 
Team, + PropertyDefinition, +) from posthog.models.event import Selector from posthog.models.property import PropertyGroup from posthog.models.property.util import build_selector_regex from posthog.models.property_definition import PropertyType -from posthog.schema import PropertyOperator, PropertyGroupFilter, PropertyGroupFilterValue, FilterLogicalOperator +from posthog.schema import ( + PropertyOperator, + PropertyGroupFilter, + PropertyGroupFilterValue, + FilterLogicalOperator, +) def has_aggregation(expr: AST) -> bool: @@ -116,7 +128,14 @@ def property_to_expr( else: exprs = [ property_to_expr( - Property(type=property.type, key=property.key, operator=property.operator, value=v), team, scope + Property( + type=property.type, + key=property.key, + operator=property.operator, + value=v, + ), + team, + scope, ) for v in value ] @@ -133,12 +152,25 @@ def property_to_expr( properties_field = ast.Field(chain=chain) if operator == PropertyOperator.is_set: - return ast.CompareOperation(op=ast.CompareOperationOp.NotEq, left=field, right=ast.Constant(value=None)) + return ast.CompareOperation( + op=ast.CompareOperationOp.NotEq, + left=field, + right=ast.Constant(value=None), + ) elif operator == PropertyOperator.is_not_set: return ast.Or( exprs=[ - ast.CompareOperation(op=ast.CompareOperationOp.Eq, left=field, right=ast.Constant(value=None)), - ast.Not(expr=ast.Call(name="JSONHas", args=[properties_field, ast.Constant(value=property.key)])), + ast.CompareOperation( + op=ast.CompareOperationOp.Eq, + left=field, + right=ast.Constant(value=None), + ), + ast.Not( + expr=ast.Call( + name="JSONHas", + args=[properties_field, ast.Constant(value=property.key)], + ) + ), ] ) elif operator == PropertyOperator.icontains: @@ -156,7 +188,10 @@ def property_to_expr( elif operator == PropertyOperator.regex: return ast.Call(name="match", args=[field, ast.Constant(value=value)]) elif operator == PropertyOperator.not_regex: - return ast.Call(name="not", args=[ast.Call(name="match", 
args=[field, ast.Constant(value=value)])]) + return ast.Call( + name="not", + args=[ast.Call(name="match", args=[field, ast.Constant(value=value)])], + ) elif operator == PropertyOperator.exact or operator == PropertyOperator.is_date_exact: op = ast.CompareOperationOp.Eq elif operator == PropertyOperator.is_not: @@ -207,7 +242,14 @@ def property_to_expr( else: exprs = [ property_to_expr( - Property(type=property.type, key=property.key, operator=property.operator, value=v), team, scope + Property( + type=property.type, + key=property.key, + operator=property.operator, + value=v, + ), + team, + scope, ) for v in value ] @@ -287,11 +329,20 @@ def action_to_expr(action: Action) -> ast.Expr: if step.url: if step.url_matching == ActionStep.EXACT: - expr = parse_expr("properties.$current_url = {url}", {"url": ast.Constant(value=step.url)}) + expr = parse_expr( + "properties.$current_url = {url}", + {"url": ast.Constant(value=step.url)}, + ) elif step.url_matching == ActionStep.REGEX: - expr = parse_expr("properties.$current_url =~ {regex}", {"regex": ast.Constant(value=step.url)}) + expr = parse_expr( + "properties.$current_url =~ {regex}", + {"regex": ast.Constant(value=step.url)}, + ) else: - expr = parse_expr("properties.$current_url like {url}", {"url": ast.Constant(value=f"%{step.url}%")}) + expr = parse_expr( + "properties.$current_url like {url}", + {"url": ast.Constant(value=f"%{step.url}%")}, + ) exprs.append(expr) if step.properties: diff --git a/posthog/hogql/query.py b/posthog/hogql/query.py index 723476b0ab5e4..697305d0ae964 100644 --- a/posthog/hogql/query.py +++ b/posthog/hogql/query.py @@ -8,7 +8,11 @@ from posthog.hogql.modifiers import create_default_modifiers_for_team from posthog.hogql.parser import parse_select from posthog.hogql.placeholders import replace_placeholders, find_placeholders -from posthog.hogql.printer import prepare_ast_for_printing, print_ast, print_prepared_ast +from posthog.hogql.printer import ( + prepare_ast_for_printing, + 
print_ast, + print_prepared_ast, +) from posthog.hogql.filters import replace_filters from posthog.hogql.timings import HogQLTimings from posthog.hogql.visitor import clone_expr @@ -61,7 +65,10 @@ def execute_hogql_query( select_query = replace_placeholders(select_query, placeholders) with timings.measure("max_limit"): - from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS + from posthog.hogql.constants import ( + DEFAULT_RETURNED_ROWS, + MAX_SELECT_RETURNED_ROWS, + ) select_queries = ( select_query.select_queries if isinstance(select_query, ast.SelectUnionQuery) else [select_query] @@ -104,7 +111,10 @@ def execute_hogql_query( else: print_columns.append( print_prepared_ast( - node=node, context=hogql_query_context, dialect="hogql", stack=[select_query_hogql] + node=node, + context=hogql_query_context, + dialect="hogql", + stack=[select_query_hogql], ) ) @@ -117,7 +127,10 @@ def execute_hogql_query( modifiers=query_modifiers, ) clickhouse_sql = print_ast( - select_query, context=clickhouse_context, dialect="clickhouse", settings=settings or HogQLGlobalSettings() + select_query, + context=clickhouse_context, + dialect="clickhouse", + settings=settings or HogQLGlobalSettings(), ) timings_dict = timings.to_dict() diff --git a/posthog/hogql/resolver.py b/posthog/hogql/resolver.py index 48ea712bb9e13..8d971084aa583 100644 --- a/posthog/hogql/resolver.py +++ b/posthog/hogql/resolver.py @@ -6,11 +6,17 @@ from posthog.hogql.ast import FieldTraverserType, ConstantType from posthog.hogql.functions import HOGQL_POSTHOG_FUNCTIONS, cohort from posthog.hogql.context import HogQLContext -from posthog.hogql.database.models import StringJSONDatabaseField, FunctionCallTable, LazyTable, SavedQuery +from posthog.hogql.database.models import ( + StringJSONDatabaseField, + FunctionCallTable, + LazyTable, + SavedQuery, +) from posthog.hogql.errors import ResolverException from posthog.hogql.functions.mapping import validate_function_args from 
posthog.hogql.functions.sparkline import sparkline from posthog.hogql.parser import parse_select +from posthog.hogql.resolver_utils import lookup_cte_by_name, lookup_field_by_name from posthog.hogql.visitor import CloningVisitor, clone_expr from posthog.models.utils import UUIDT from posthog.hogql.database.schema.events import EventsTable @@ -47,7 +53,9 @@ def resolve_constant_data_type(constant: Any) -> ConstantType: def resolve_types( - node: ast.Expr, context: HogQLContext, scopes: Optional[List[ast.SelectQueryType]] = None + node: ast.Expr, + context: HogQLContext, + scopes: Optional[List[ast.SelectQueryType]] = None, ) -> ast.Expr: return Resolver(scopes=scopes, context=context).visit(node) @@ -331,7 +339,10 @@ def visit_call(self, node: ast.Call): else: param_types.append(ast.UnknownType()) node.type = ast.CallType( - name=node.name, arg_types=arg_types, param_types=param_types, return_type=ast.UnknownType() + name=node.name, + arg_types=arg_types, + param_types=param_types, + return_type=ast.UnknownType(), ) return node @@ -453,7 +464,10 @@ def visit_array_access(self, node: ast.ArrayAccess): (isinstance(node.array.type, ast.PropertyType)) or ( isinstance(node.array.type, ast.FieldType) - and isinstance(node.array.type.resolve_database_field(), StringJSONDatabaseField) + and isinstance( + node.array.type.resolve_database_field(), + StringJSONDatabaseField, + ) ) ) ): @@ -563,30 +577,3 @@ def _is_next_s3(self, node: Optional[ast.JoinExpr]): if isinstance(node.type, ast.TableAliasType): return isinstance(node.type.table_type.table, S3Table) return False - - -def lookup_field_by_name(scope: ast.SelectQueryType, name: str) -> Optional[ast.Type]: - """Looks for a field in the scope's list of aliases and children for each joined table.""" - if name in scope.aliases: - return scope.aliases[name] - else: - named_tables = [table for table in scope.tables.values() if table.has_child(name)] - anonymous_tables = [table for table in scope.anonymous_tables if 
table.has_child(name)] - tables_with_field = named_tables + anonymous_tables - - if len(tables_with_field) > 1: - raise ResolverException(f"Ambiguous query. Found multiple sources for field: {name}") - elif len(tables_with_field) == 1: - return tables_with_field[0].get_child(name) - - if scope.parent: - return lookup_field_by_name(scope.parent, name) - - return None - - -def lookup_cte_by_name(scopes: List[ast.SelectQueryType], name: str) -> Optional[ast.CTE]: - for scope in reversed(scopes): - if scope and scope.ctes and name in scope.ctes: - return scope.ctes[name] - return None diff --git a/posthog/hogql/resolver_utils.py b/posthog/hogql/resolver_utils.py new file mode 100644 index 0000000000000..2fb8fd6d814f7 --- /dev/null +++ b/posthog/hogql/resolver_utils.py @@ -0,0 +1,47 @@ +from typing import List, Optional +from posthog.hogql import ast +from posthog.hogql.errors import HogQLException, ResolverException + + +def lookup_field_by_name(scope: ast.SelectQueryType, name: str) -> Optional[ast.Type]: + """Looks for a field in the scope's list of aliases and children for each joined table.""" + if name in scope.aliases: + return scope.aliases[name] + else: + named_tables = [table for table in scope.tables.values() if table.has_child(name)] + anonymous_tables = [table for table in scope.anonymous_tables if table.has_child(name)] + tables_with_field = named_tables + anonymous_tables + + if len(tables_with_field) > 1: + raise ResolverException(f"Ambiguous query. 
Found multiple sources for field: {name}") + elif len(tables_with_field) == 1: + return tables_with_field[0].get_child(name) + + if scope.parent: + return lookup_field_by_name(scope.parent, name) + + return None + + +def lookup_cte_by_name(scopes: List[ast.SelectQueryType], name: str) -> Optional[ast.CTE]: + for scope in reversed(scopes): + if scope and scope.ctes and name in scope.ctes: + return scope.ctes[name] + return None + + +def get_long_table_name(select: ast.SelectQueryType, type: ast.Type) -> str: + if isinstance(type, ast.TableType): + return select.get_alias_for_table_type(type) or "" + elif isinstance(type, ast.LazyTableType): + return type.table.to_printed_hogql() + elif isinstance(type, ast.TableAliasType): + return type.alias + elif isinstance(type, ast.SelectQueryAliasType): + return type.alias + elif isinstance(type, ast.LazyJoinType): + return f"{get_long_table_name(select, type.table_type)}__{type.field}" + elif isinstance(type, ast.VirtualTableType): + return f"{get_long_table_name(select, type.table_type)}__{type.field}" + else: + raise HogQLException(f"Unknown table type in LazyTableResolver: {type.__class__.__name__}") diff --git a/posthog/hogql/test/_test_parser.py b/posthog/hogql/test/_test_parser.py index 765d4fbaab4de..9b5fa20dcf910 100644 --- a/posthog/hogql/test/_test_parser.py +++ b/posthog/hogql/test/_test_parser.py @@ -57,7 +57,9 @@ def test_conditional(self): name="if", args=[ ast.CompareOperation( - op=ast.CompareOperationOp.Gt, left=ast.Constant(value=1), right=ast.Constant(value=2) + op=ast.CompareOperationOp.Gt, + left=ast.Constant(value=1), + right=ast.Constant(value=2), ), ast.Constant(value=1), ast.Constant(value=2), @@ -69,11 +71,15 @@ def test_arrays(self): self.assertEqual(self._expr("[]"), ast.Array(exprs=[])) self.assertEqual(self._expr("[1]"), ast.Array(exprs=[ast.Constant(value=1)])) self.assertEqual( - self._expr("[1, avg()]"), ast.Array(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]) + self._expr("[1, 
avg()]"), + ast.Array(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]), ) self.assertEqual( self._expr("properties['value']"), - ast.ArrayAccess(array=ast.Field(chain=["properties"]), property=ast.Constant(value="value")), + ast.ArrayAccess( + array=ast.Field(chain=["properties"]), + property=ast.Constant(value="value"), + ), ) self.assertEqual( self._expr("properties[(select 'value')]"), @@ -98,7 +104,8 @@ def test_arrays(self): def test_tuples(self): self.assertEqual( - self._expr("(1, avg())"), ast.Tuple(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]) + self._expr("(1, avg())"), + ast.Tuple(exprs=[ast.Constant(value=1), ast.Call(name="avg", args=[])]), ) # needs at least two values to be a tuple self.assertEqual(self._expr("(1)"), ast.Constant(value=1)) @@ -165,44 +172,58 @@ def test_arithmetic_operations(self): self.assertEqual( self._expr("1 + 2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Add, ), ) self.assertEqual( self._expr("1 + -2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=-2), op=ast.ArithmeticOperationOp.Add + left=ast.Constant(value=1), + right=ast.Constant(value=-2), + op=ast.ArithmeticOperationOp.Add, ), ) self.assertEqual( self._expr("1 - 2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Sub + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Sub, ), ) self.assertEqual( self._expr("1 * 2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mult + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Mult, ), ) self.assertEqual( self._expr("1 / 2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), 
right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Div + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Div, ), ) self.assertEqual( self._expr("1 % 2"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mod + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Mod, ), ) self.assertEqual( self._expr("1 + 2 + 2"), ast.ArithmeticOperation( left=ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Add, ), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add, @@ -212,7 +233,9 @@ def test_arithmetic_operations(self): self._expr("1 * 1 * 2"), ast.ArithmeticOperation( left=ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Mult + left=ast.Constant(value=1), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Mult, ), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mult, @@ -223,7 +246,9 @@ def test_arithmetic_operations(self): ast.ArithmeticOperation( left=ast.Constant(value=1), right=ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Mult + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.ArithmeticOperationOp.Mult, ), op=ast.ArithmeticOperationOp.Add, ), @@ -232,7 +257,9 @@ def test_arithmetic_operations(self): self._expr("1 * 1 + 2"), ast.ArithmeticOperation( left=ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Mult + left=ast.Constant(value=1), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Mult, ), right=ast.Constant(value=2), op=ast.ArithmeticOperationOp.Add, @@ -243,43 +270,57 @@ def 
test_math_comparison_operations(self): self.assertEqual( self._expr("1 = 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Eq + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.Eq, ), ) self.assertEqual( self._expr("1 == 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Eq + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.Eq, ), ) self.assertEqual( self._expr("1 != 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.NotEq + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.NotEq, ), ) self.assertEqual( self._expr("1 < 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Lt + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.Lt, ), ) self.assertEqual( self._expr("1 <= 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.LtEq + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.LtEq, ), ) self.assertEqual( self._expr("1 > 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.Gt + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.Gt, ), ) self.assertEqual( self._expr("1 >= 2"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=2), op=ast.CompareOperationOp.GtEq + left=ast.Constant(value=1), + right=ast.Constant(value=2), + op=ast.CompareOperationOp.GtEq, ), ) @@ -287,13 +328,17 @@ def test_null_comparison_operations(self): self.assertEqual( self._expr("1 is null"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=None), 
op=ast.CompareOperationOp.Eq + left=ast.Constant(value=1), + right=ast.Constant(value=None), + op=ast.CompareOperationOp.Eq, ), ) self.assertEqual( self._expr("1 is not null"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=None), op=ast.CompareOperationOp.NotEq + left=ast.Constant(value=1), + right=ast.Constant(value=None), + op=ast.CompareOperationOp.NotEq, ), ) @@ -301,25 +346,33 @@ def test_like_comparison_operations(self): self.assertEqual( self._expr("1 like 'a%sd'"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.Like + left=ast.Constant(value=1), + right=ast.Constant(value="a%sd"), + op=ast.CompareOperationOp.Like, ), ) self.assertEqual( self._expr("1 not like 'a%sd'"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.NotLike + left=ast.Constant(value=1), + right=ast.Constant(value="a%sd"), + op=ast.CompareOperationOp.NotLike, ), ) self.assertEqual( self._expr("1 ilike 'a%sd'"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.ILike + left=ast.Constant(value=1), + right=ast.Constant(value="a%sd"), + op=ast.CompareOperationOp.ILike, ), ) self.assertEqual( self._expr("1 not ilike 'a%sd'"), ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value="a%sd"), op=ast.CompareOperationOp.NotILike + left=ast.Constant(value=1), + right=ast.Constant(value="a%sd"), + op=ast.CompareOperationOp.NotILike, ), ) @@ -335,7 +388,10 @@ def test_and_or(self): self.assertEqual( self._expr("true and not false"), ast.And( - exprs=[ast.Constant(value=True), ast.Not(expr=ast.Constant(value=False))], + exprs=[ + ast.Constant(value=True), + ast.Not(expr=ast.Constant(value=False)), + ], ), ) self.assertEqual( @@ -355,7 +411,10 @@ def test_and_or(self): exprs=[ ast.Constant(value=True), ast.And( - exprs=[ast.Constant(value=False), 
ast.Not(expr=ast.Constant(value=True))], + exprs=[ + ast.Constant(value=False), + ast.Not(expr=ast.Constant(value=True)), + ], ), ast.Constant(value=2), ], @@ -376,7 +435,9 @@ def test_parens(self): self.assertEqual( self._expr("(1 + 1)"), ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Add + left=ast.Constant(value=1), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Add, ), ) self.assertEqual( @@ -384,7 +445,9 @@ def test_parens(self): ast.ArithmeticOperation( left=ast.Constant(value=1), right=ast.ArithmeticOperation( - left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.ArithmeticOperationOp.Add + left=ast.Constant(value=1), + right=ast.Constant(value=1), + op=ast.ArithmeticOperationOp.Add, ), op=ast.ArithmeticOperationOp.Add, ), @@ -398,7 +461,9 @@ def test_field_access(self): self.assertEqual( self._expr("event like '$%'"), ast.CompareOperation( - left=ast.Field(chain=["event"]), right=ast.Constant(value="$%"), op=ast.CompareOperationOp.Like + left=ast.Field(chain=["event"]), + right=ast.Constant(value="$%"), + op=ast.CompareOperationOp.Like, ), ) @@ -435,13 +500,24 @@ def test_calls(self): ) self.assertEqual( self._expr("avg(1,2,3)"), - ast.Call(name="avg", args=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]), + ast.Call( + name="avg", + args=[ + ast.Constant(value=1), + ast.Constant(value=2), + ast.Constant(value=3), + ], + ), ) def test_calls_with_params(self): self.assertEqual( self._expr("quantile(0.95)(foo)"), - ast.Call(name="quantile", args=[ast.Field(chain=["foo"])], params=[ast.Constant(value=0.95)]), + ast.Call( + name="quantile", + args=[ast.Field(chain=["foo"])], + params=[ast.Constant(value=0.95)], + ), ) def test_alias(self): @@ -513,15 +589,25 @@ def test_intervals(self): ) def test_select_columns(self): - self.assertEqual(self._select("select 1"), ast.SelectQuery(select=[ast.Constant(value=1)])) + self.assertEqual( + 
self._select("select 1"), + ast.SelectQuery(select=[ast.Constant(value=1)]), + ) self.assertEqual( self._select("select 1, 4, 'string'"), - ast.SelectQuery(select=[ast.Constant(value=1), ast.Constant(value=4), ast.Constant(value="string")]), + ast.SelectQuery( + select=[ + ast.Constant(value=1), + ast.Constant(value=4), + ast.Constant(value="string"), + ] + ), ) def test_select_columns_distinct(self): self.assertEqual( - self._select("select distinct 1"), ast.SelectQuery(select=[ast.Constant(value=1)], distinct=True) + self._select("select distinct 1"), + ast.SelectQuery(select=[ast.Constant(value=1)], distinct=True), ) def test_select_where(self): @@ -534,7 +620,9 @@ def test_select_where(self): ast.SelectQuery( select=[ast.Constant(value=1)], where=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2) + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value=1), + right=ast.Constant(value=2), ), ), ) @@ -549,7 +637,9 @@ def test_select_prewhere(self): ast.SelectQuery( select=[ast.Constant(value=1)], prewhere=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2) + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value=1), + right=ast.Constant(value=2), ), ), ) @@ -564,7 +654,9 @@ def test_select_having(self): ast.SelectQuery( select=[ast.Constant(value=1)], having=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2) + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value=1), + right=ast.Constant(value=2), ), ), ) @@ -575,10 +667,14 @@ def test_select_complex_wheres(self): ast.SelectQuery( select=[ast.Constant(value=1)], where=ast.CompareOperation( - op=ast.CompareOperationOp.Eq, left=ast.Constant(value=1), right=ast.Constant(value=2) + op=ast.CompareOperationOp.Eq, + left=ast.Constant(value=1), + right=ast.Constant(value=2), ), prewhere=ast.CompareOperation( - op=ast.CompareOperationOp.NotEq, 
left=ast.Constant(value=2), right=ast.Constant(value=3) + op=ast.CompareOperationOp.NotEq, + left=ast.Constant(value=2), + right=ast.Constant(value=3), ), having=ast.CompareOperation( op=ast.CompareOperationOp.Like, @@ -592,7 +688,8 @@ def test_select_from(self): self.assertEqual( self._select("select 1 from events"), ast.SelectQuery( - select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])) + select=[ast.Constant(value=1)], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ), ) self.assertEqual( @@ -636,7 +733,8 @@ def test_select_from(self): select=[ast.Constant(value=1)], select_from=ast.JoinExpr( table=ast.SelectQuery( - select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])) + select=[ast.Constant(value=1)], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ) ), ), @@ -647,7 +745,8 @@ def test_select_from(self): select=[ast.Constant(value=1)], select_from=ast.JoinExpr( table=ast.SelectQuery( - select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])) + select=[ast.Constant(value=1)], + select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ), alias="sq", ), @@ -663,7 +762,10 @@ def test_select_from_placeholder(self): ), ) self.assertEqual( - self._select("select 1 from {placeholder}", {"placeholder": ast.Field(chain=["events"])}), + self._select( + "select 1 from {placeholder}", + {"placeholder": ast.Field(chain=["events"])}, + ), ast.SelectQuery( select=[ast.Constant(value=1)], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), @@ -830,7 +932,13 @@ def test_select_array_join(self): array_join_op="ARRAY JOIN", array_join_list=[ ast.Alias( - expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]), + expr=ast.Array( + exprs=[ + ast.Constant(value=1), + ast.Constant(value=2), + ast.Constant(value=3), + ] + ), alias="a", ) ], @@ -844,7 +952,13 @@ def test_select_array_join(self): 
array_join_op="INNER ARRAY JOIN", array_join_list=[ ast.Alias( - expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]), + expr=ast.Array( + exprs=[ + ast.Constant(value=1), + ast.Constant(value=2), + ast.Constant(value=3), + ] + ), alias="a", ) ], @@ -858,11 +972,23 @@ def test_select_array_join(self): array_join_op="LEFT ARRAY JOIN", array_join_list=[ ast.Alias( - expr=ast.Array(exprs=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]), + expr=ast.Array( + exprs=[ + ast.Constant(value=1), + ast.Constant(value=2), + ast.Constant(value=3), + ] + ), alias="a", ), ast.Alias( - expr=ast.Array(exprs=[ast.Constant(value=4), ast.Constant(value=5), ast.Constant(value=6)]), + expr=ast.Array( + exprs=[ + ast.Constant(value=4), + ast.Constant(value=5), + ast.Constant(value=6), + ] + ), alias="b", ), ], @@ -878,7 +1004,10 @@ def test_select_array_join_errors(self): with self.assertRaises(HogQLException) as e: self._select("select a ARRAY JOIN [1,2,3]") - self.assertEqual(str(e.exception), "Using ARRAY JOIN without a FROM clause is not permitted") + self.assertEqual( + str(e.exception), + "Using ARRAY JOIN without a FROM clause is not permitted", + ) self.assertEqual(e.exception.start, 0) self.assertEqual(e.exception.end, 27) @@ -895,15 +1024,30 @@ def test_select_group_by(self): def test_order_by(self): self.assertEqual( parse_order_expr("1 ASC"), - ast.OrderExpr(expr=ast.Constant(value=1, start=0, end=1), order="ASC", start=0, end=5), + ast.OrderExpr( + expr=ast.Constant(value=1, start=0, end=1), + order="ASC", + start=0, + end=5, + ), ) self.assertEqual( parse_order_expr("event"), - ast.OrderExpr(expr=ast.Field(chain=["event"], start=0, end=5), order="ASC", start=0, end=5), + ast.OrderExpr( + expr=ast.Field(chain=["event"], start=0, end=5), + order="ASC", + start=0, + end=5, + ), ) self.assertEqual( parse_order_expr("timestamp DESC"), - ast.OrderExpr(expr=ast.Field(chain=["timestamp"], start=0, end=9), order="DESC", 
start=0, end=14), + ast.OrderExpr( + expr=ast.Field(chain=["timestamp"], start=0, end=9), + order="DESC", + start=0, + end=14, + ), ) def test_select_order_by(self): @@ -993,7 +1137,10 @@ def test_select_placeholders(self): ), ) self.assertEqual( - self._select("select 1 where 1 == {hogql_val_1}", {"hogql_val_1": ast.Constant(value="bar")}), + self._select( + "select 1 where 1 == {hogql_val_1}", + {"hogql_val_1": ast.Constant(value="bar")}, + ), ast.SelectQuery( select=[ast.Constant(value=1)], where=ast.CompareOperation( @@ -1082,7 +1229,13 @@ def test_select_with_columns(self): self.assertEqual( self._select("with event as boo select boo from events"), ast.SelectQuery( - ctes={"boo": ast.CTE(name="boo", expr=ast.Field(chain=["event"]), cte_type="column")}, + ctes={ + "boo": ast.CTE( + name="boo", + expr=ast.Field(chain=["event"]), + cte_type="column", + ) + }, select=[ast.Field(chain=["boo"])], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ), @@ -1090,7 +1243,13 @@ def test_select_with_columns(self): self.assertEqual( self._select("with count() as kokku select kokku from events"), ast.SelectQuery( - ctes={"kokku": ast.CTE(name="kokku", expr=ast.Call(name="count", args=[]), cte_type="column")}, + ctes={ + "kokku": ast.CTE( + name="kokku", + expr=ast.Call(name="count", args=[]), + cte_type="column", + ) + }, select=[ast.Field(chain=["kokku"])], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), ), @@ -1169,7 +1328,14 @@ def test_ctes_subquery_recursion(self): def test_case_when(self): self.assertEqual( self._expr("case when 1 then 2 else 3 end"), - ast.Call(name="if", args=[ast.Constant(value=1), ast.Constant(value=2), ast.Constant(value=3)]), + ast.Call( + name="if", + args=[ + ast.Constant(value=1), + ast.Constant(value=2), + ast.Constant(value=3), + ], + ), ) def test_case_when_many(self): @@ -1214,7 +1380,12 @@ def test_window_functions(self): args=[ast.Field(chain=["timestamp"])], over_expr=ast.WindowExpr( 
partition_by=[ast.Field(chain=["person", "id"])], - order_by=[ast.OrderExpr(expr=ast.Field(chain=["timestamp"]), order="DESC")], + order_by=[ + ast.OrderExpr( + expr=ast.Field(chain=["timestamp"]), + order="DESC", + ) + ], frame_method="ROWS", frame_start=ast.WindowFrameExpr(frame_type="PRECEDING", frame_value=None), frame_end=ast.WindowFrameExpr(frame_type="PRECEDING", frame_value=1), @@ -1257,7 +1428,8 @@ def test_window_functions_with_window(self): def test_property_access_with_arrays_zero_index_error(self): query = f"SELECT properties.something[0] FROM events" with self.assertRaisesMessage( - SyntaxException, "SQL indexes start from one, not from zero. E.g: array[1]" + SyntaxException, + "SQL indexes start from one, not from zero. E.g: array[1]", ) as e: self._select(query) self.assertEqual(e.exception.start, 7) @@ -1266,7 +1438,8 @@ def test_property_access_with_arrays_zero_index_error(self): def test_property_access_with_tuples_zero_index_error(self): query = f"SELECT properties.something.0 FROM events" with self.assertRaisesMessage( - SyntaxException, "SQL indexes start from one, not from zero. E.g: array[1]" + SyntaxException, + "SQL indexes start from one, not from zero. 
E.g: array[1]", ) as e: self._select(query) self.assertEqual(e.exception.start, 7) @@ -1275,7 +1448,8 @@ def test_property_access_with_tuples_zero_index_error(self): def test_reserved_keyword_alias_error(self): query = f"SELECT 0 AS trUE FROM events" with self.assertRaisesMessage( - SyntaxException, '"trUE" cannot be an alias or identifier, as it\'s a reserved keyword' + SyntaxException, + '"trUE" cannot be an alias or identifier, as it\'s a reserved keyword', ) as e: self._select(query) self.assertEqual(e.exception.start, 7) @@ -1284,7 +1458,8 @@ def test_reserved_keyword_alias_error(self): def test_malformed_sql(self): query = "SELEC 2" with self.assertRaisesMessage( - SyntaxException, "mismatched input 'SELEC' expecting {SELECT, WITH, '('}" + SyntaxException, + "mismatched input 'SELEC' expecting {SELECT, WITH, '('}", ) as e: self._select(query) self.assertEqual(e.exception.start, 0) diff --git a/posthog/hogql/test/test_bytecode.py b/posthog/hogql/test/test_bytecode.py index bbd90608fe0c6..7fee12533d6da 100644 --- a/posthog/hogql/test/test_bytecode.py +++ b/posthog/hogql/test/test_bytecode.py @@ -11,20 +11,54 @@ def test_bytecode_create(self): self.assertEqual(to_bytecode("1 or 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2]) self.assertEqual( to_bytecode("1 or (2 and 1) or 2"), - [_H, op.INTEGER, 2, op.INTEGER, 1, op.INTEGER, 2, op.AND, 2, op.INTEGER, 1, op.OR, 3], + [ + _H, + op.INTEGER, + 2, + op.INTEGER, + 1, + op.INTEGER, + 2, + op.AND, + 2, + op.INTEGER, + 1, + op.OR, + 3, + ], ) self.assertEqual( to_bytecode("(1 or 2) and (1 or 2)"), - [_H, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2, op.INTEGER, 2, op.INTEGER, 1, op.OR, 2, op.AND, 2], + [ + _H, + op.INTEGER, + 2, + op.INTEGER, + 1, + op.OR, + 2, + op.INTEGER, + 2, + op.INTEGER, + 1, + op.OR, + 2, + op.AND, + 2, + ], ) self.assertEqual(to_bytecode("not true"), [_H, op.TRUE, op.NOT]) self.assertEqual(to_bytecode("true"), [_H, op.TRUE]) self.assertEqual(to_bytecode("false"), [_H, op.FALSE]) 
self.assertEqual(to_bytecode("null"), [_H, op.NULL]) self.assertEqual(to_bytecode("3.14"), [_H, op.FLOAT, 3.14]) - self.assertEqual(to_bytecode("properties.bla"), [_H, op.STRING, "bla", op.STRING, "properties", op.FIELD, 2]) self.assertEqual( - to_bytecode("concat('arg', 'another')"), [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "concat", 2] + to_bytecode("properties.bla"), + [_H, op.STRING, "bla", op.STRING, "properties", op.FIELD, 2], + ) + self.assertEqual( + to_bytecode("concat('arg', 'another')"), + [_H, op.STRING, "another", op.STRING, "arg", op.CALL, "concat", 2], ) self.assertEqual(to_bytecode("1 = 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) self.assertEqual(to_bytecode("1 == 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.EQ]) @@ -36,32 +70,58 @@ def test_bytecode_create(self): self.assertEqual(to_bytecode("1 like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.LIKE]) self.assertEqual(to_bytecode("1 ilike 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.ILIKE]) self.assertEqual(to_bytecode("1 not like 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_LIKE]) - self.assertEqual(to_bytecode("1 not ilike 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE]) + self.assertEqual( + to_bytecode("1 not ilike 2"), + [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_ILIKE], + ) self.assertEqual(to_bytecode("1 in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.IN]) self.assertEqual(to_bytecode("1 not in 2"), [_H, op.INTEGER, 2, op.INTEGER, 1, op.NOT_IN]) - self.assertEqual(to_bytecode("'string' ~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.REGEX]) - self.assertEqual(to_bytecode("'string' =~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.REGEX]) self.assertEqual( - to_bytecode("'string' !~ 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX] + to_bytecode("'string' ~ 'regex'"), + [_H, op.STRING, "regex", op.STRING, "string", op.REGEX], + ) + self.assertEqual( + to_bytecode("'string' =~ 'regex'"), + [_H, op.STRING, "regex", op.STRING, 
"string", op.REGEX], + ) + self.assertEqual( + to_bytecode("'string' !~ 'regex'"), + [_H, op.STRING, "regex", op.STRING, "string", op.NOT_REGEX], ) - self.assertEqual(to_bytecode("'string' ~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX]) - self.assertEqual(to_bytecode("'string' =~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX]) self.assertEqual( - to_bytecode("'string' !~* 'regex'"), [_H, op.STRING, "regex", op.STRING, "string", op.NOT_IREGEX] + to_bytecode("'string' ~* 'regex'"), + [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX], ) self.assertEqual( - to_bytecode("match('test', 'e.*')"), [_H, op.STRING, "e.*", op.STRING, "test", op.CALL, "match", 2] + to_bytecode("'string' =~* 'regex'"), + [_H, op.STRING, "regex", op.STRING, "string", op.IREGEX], ) self.assertEqual( - to_bytecode("match('test', '^e.*')"), [_H, op.STRING, "^e.*", op.STRING, "test", op.CALL, "match", 2] + to_bytecode("'string' !~* 'regex'"), + [_H, op.STRING, "regex", op.STRING, "string", op.NOT_IREGEX], ) self.assertEqual( - to_bytecode("match('test', 'x.*')"), [_H, op.STRING, "x.*", op.STRING, "test", op.CALL, "match", 2] + to_bytecode("match('test', 'e.*')"), + [_H, op.STRING, "e.*", op.STRING, "test", op.CALL, "match", 2], + ) + self.assertEqual( + to_bytecode("match('test', '^e.*')"), + [_H, op.STRING, "^e.*", op.STRING, "test", op.CALL, "match", 2], + ) + self.assertEqual( + to_bytecode("match('test', 'x.*')"), + [_H, op.STRING, "x.*", op.STRING, "test", op.CALL, "match", 2], ) self.assertEqual(to_bytecode("not('test')"), [_H, op.STRING, "test", op.NOT]) self.assertEqual(to_bytecode("not 'test'"), [_H, op.STRING, "test", op.NOT]) - self.assertEqual(to_bytecode("or('test', 'test2')"), [_H, op.STRING, "test2", op.STRING, "test", op.OR, 2]) - self.assertEqual(to_bytecode("and('test', 'test2')"), [_H, op.STRING, "test2", op.STRING, "test", op.AND, 2]) + self.assertEqual( + to_bytecode("or('test', 'test2')"), + [_H, op.STRING, "test2", 
op.STRING, "test", op.OR, 2], + ) + self.assertEqual( + to_bytecode("and('test', 'test2')"), + [_H, op.STRING, "test2", op.STRING, "test", op.AND, 2], + ) def test_bytecode_create_error(self): with self.assertRaises(NotImplementedException) as e: diff --git a/posthog/hogql/test/test_escape_sql.py b/posthog/hogql/test/test_escape_sql.py index 8f541f05aab40..0e24d8d8116f5 100644 --- a/posthog/hogql/test/test_escape_sql.py +++ b/posthog/hogql/test/test_escape_sql.py @@ -65,7 +65,10 @@ def test_sanitize_clickhouse_string(self): uuid = UUIDT() self.assertEqual(escape_clickhouse_string(uuid), f"toUUIDOrNull('{str(uuid)}')") date = datetime.fromisoformat("2020-02-02 02:02:02") - self.assertEqual(escape_clickhouse_string(date), "toDateTime64('2020-02-02 02:02:02.000000', 6, 'UTC')") + self.assertEqual( + escape_clickhouse_string(date), + "toDateTime64('2020-02-02 02:02:02.000000', 6, 'UTC')", + ) self.assertEqual( escape_clickhouse_string(date, timezone="Europe/Brussels"), "toDateTime64('2020-02-02 03:02:02.000000', 6, 'Europe/Brussels')", @@ -80,7 +83,10 @@ def test_sanitize_clickhouse_string(self): self.assertEqual(escape_clickhouse_string(float("123.123")), "123.123") self.assertEqual(escape_clickhouse_string(float("-123.123")), "-123.123") self.assertEqual(escape_clickhouse_string(float("0.000000000000000001")), "1e-18") - self.assertEqual(escape_clickhouse_string(float("234732482374928374923")), "2.3473248237492837e+20") + self.assertEqual( + escape_clickhouse_string(float("234732482374928374923")), + "2.3473248237492837e+20", + ) def test_sanitize_hogql_string(self): self.assertEqual(escape_hogql_string("a"), "'a'") @@ -101,7 +107,8 @@ def test_sanitize_hogql_string(self): date = datetime.fromisoformat("2020-02-02 02:02:02") self.assertEqual(escape_hogql_string(date), "toDateTime('2020-02-02 02:02:02.000000')") self.assertEqual( - escape_hogql_string(date, timezone="Europe/Brussels"), "toDateTime('2020-02-02 03:02:02.000000')" + escape_hogql_string(date, 
timezone="Europe/Brussels"), + "toDateTime('2020-02-02 03:02:02.000000')", ) self.assertEqual(escape_hogql_string(date.date()), "toDate('2020-02-02')") self.assertEqual(escape_hogql_string(1), "1") @@ -113,7 +120,10 @@ def test_sanitize_hogql_string(self): self.assertEqual(escape_hogql_string(float("123.123")), "123.123") self.assertEqual(escape_hogql_string(float("-123.123")), "-123.123") self.assertEqual(escape_hogql_string(float("0.000000000000000001")), "1e-18") - self.assertEqual(escape_hogql_string(float("234732482374928374923")), "2.3473248237492837e+20") + self.assertEqual( + escape_hogql_string(float("234732482374928374923")), + "2.3473248237492837e+20", + ) def test_escape_hogql_identifier_errors(self): with self.assertRaises(HogQLException) as context: diff --git a/posthog/hogql/test/test_filters.py b/posthog/hogql/test/test_filters.py index b7c20e67e4f7e..98b319bb31694 100644 --- a/posthog/hogql/test/test_filters.py +++ b/posthog/hogql/test/test_filters.py @@ -6,7 +6,12 @@ from posthog.hogql.parser import parse_expr, parse_select from posthog.hogql.printer import print_ast from posthog.hogql.visitor import clear_locations -from posthog.schema import HogQLFilters, EventPropertyFilter, PersonPropertyFilter, DateRange +from posthog.schema import ( + HogQLFilters, + EventPropertyFilter, + PersonPropertyFilter, + DateRange, +) from posthog.test.base import BaseTest @@ -20,14 +25,20 @@ def _parse_select(self, select: str, placeholders: Dict[str, Any] = None): return clear_locations(parse_select(select, placeholders=placeholders)) def _print_ast(self, node: ast.Expr): - return print_ast(node, dialect="hogql", context=HogQLContext(team_id=self.team.pk, enable_select_queries=True)) + return print_ast( + node, + dialect="hogql", + context=HogQLContext(team_id=self.team.pk, enable_select_queries=True), + ) def test_replace_filters(self): select = replace_filters(self._parse_select("SELECT event FROM events"), HogQLFilters(), self.team) 
self.assertEqual(self._print_ast(select), "SELECT event FROM events LIMIT 10000") select = replace_filters( - self._parse_select("SELECT event FROM events where {filters}"), HogQLFilters(), self.team + self._parse_select("SELECT event FROM events where {filters}"), + HogQLFilters(), + self.team, ) self.assertEqual(self._print_ast(select), "SELECT event FROM events WHERE true LIMIT 10000") @@ -59,7 +70,8 @@ def test_replace_filters(self): self.team, ) self.assertEqual( - self._print_ast(select), "SELECT event FROM events WHERE equals(properties.random_uuid, '123') LIMIT 10000" + self._print_ast(select), + "SELECT event FROM events WHERE equals(properties.random_uuid, '123') LIMIT 10000", ) select = replace_filters( diff --git a/posthog/hogql/test/test_metadata.py b/posthog/hogql/test/test_metadata.py index 46f9e13cc04f3..fe440243e909d 100644 --- a/posthog/hogql/test/test_metadata.py +++ b/posthog/hogql/test/test_metadata.py @@ -9,12 +9,14 @@ class TestMetadata(ClickhouseTestMixin, APIBaseTest): def _expr(self, query: str, table: str = "events") -> HogQLMetadataResponse: return get_hogql_metadata( - query=HogQLMetadata(kind="HogQLMetadata", expr=query, table=table, response=None), team=self.team + query=HogQLMetadata(kind="HogQLMetadata", expr=query, table=table, response=None), + team=self.team, ) def _select(self, query: str) -> HogQLMetadataResponse: return get_hogql_metadata( - query=HogQLMetadata(kind="HogQLMetadata", select=query, response=None), team=self.team + query=HogQLMetadata(kind="HogQLMetadata", select=query, response=None), + team=self.team, ) def test_metadata_valid_expr_select(self): @@ -26,7 +28,14 @@ def test_metadata_valid_expr_select(self): "isValid": False, "inputExpr": "select 1", "inputSelect": None, - "errors": [{"message": "extraneous input '1' expecting ", "start": 7, "end": 8, "fix": None}], + "errors": [ + { + "message": "extraneous input '1' expecting ", + "start": 7, + "end": 8, + "fix": None, + } + ], }, ) diff --git 
a/posthog/hogql/test/test_modifiers.py b/posthog/hogql/test/test_modifiers.py index b876d7ada529d..ba5ed58e84882 100644 --- a/posthog/hogql/test/test_modifiers.py +++ b/posthog/hogql/test/test_modifiers.py @@ -13,11 +13,13 @@ def test_create_default_modifiers_for_team_init(self): modifiers = create_default_modifiers_for_team(self.team) assert modifiers.personsOnEventsMode == PersonsOnEventsMode.disabled # NB! not a None modifiers = create_default_modifiers_for_team( - self.team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled) + self.team, + HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled), ) assert modifiers.personsOnEventsMode == PersonsOnEventsMode.v1_enabled modifiers = create_default_modifiers_for_team( - self.team, HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v2_enabled) + self.team, + HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v2_enabled), ) assert modifiers.personsOnEventsMode == PersonsOnEventsMode.v2_enabled @@ -26,13 +28,17 @@ def test_modifiers_persons_on_events_mode_v1_enabled(self): # Control response = execute_hogql_query( - query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.disabled) + query, + team=self.team, + modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.disabled), ) assert " JOIN " in response.clickhouse # Test response = execute_hogql_query( - query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled) + query, + team=self.team, + modifiers=HogQLQueryModifiers(personsOnEventsMode=PersonsOnEventsMode.v1_enabled), ) assert " JOIN " not in response.clickhouse @@ -70,9 +76,11 @@ def test_modifiers_persons_on_events_mode_mapping(self): ), ] - for (mode, *expected) in test_cases: + for mode, *expected in test_cases: response = execute_hogql_query( - query, team=self.team, modifiers=HogQLQueryModifiers(personsOnEventsMode=mode) + query, + team=self.team, + 
modifiers=HogQLQueryModifiers(personsOnEventsMode=mode), ) assert f"SELECT {', '.join(expected)} FROM" in response.clickhouse, f"PoE mode: {mode}" @@ -80,11 +88,19 @@ def test_modifiers_persons_argmax_version_v2(self): query = "SELECT * FROM persons" # Control (v1) - response = execute_hogql_query(query, team=self.team, modifiers=HogQLQueryModifiers(personsArgMaxVersion="v1")) + response = execute_hogql_query( + query, + team=self.team, + modifiers=HogQLQueryModifiers(personsArgMaxVersion="v1"), + ) assert "in(tuple(person.id, person.version)" not in response.clickhouse # Test (v2) - response = execute_hogql_query(query, team=self.team, modifiers=HogQLQueryModifiers(personsArgMaxVersion="v2")) + response = execute_hogql_query( + query, + team=self.team, + modifiers=HogQLQueryModifiers(personsArgMaxVersion="v2"), + ) assert "in(tuple(person.id, person.version)" in response.clickhouse def test_modifiers_persons_argmax_version_auto(self): diff --git a/posthog/hogql/test/test_placeholders.py b/posthog/hogql/test/test_placeholders.py index 6906104795775..88c92ebfc8fe8 100644 --- a/posthog/hogql/test/test_placeholders.py +++ b/posthog/hogql/test/test_placeholders.py @@ -26,11 +26,15 @@ def test_replace_placeholders_error(self): expr = ast.Placeholder(field="foo") with self.assertRaises(HogQLException) as context: replace_placeholders(expr, {}) - self.assertEqual("Placeholders, such as {foo}, are not supported in this context", str(context.exception)) + self.assertEqual( + "Placeholders, such as {foo}, are not supported in this context", + str(context.exception), + ) with self.assertRaises(HogQLException) as context: replace_placeholders(expr, {"bar": ast.Constant(value=123)}) self.assertEqual( - "Placeholder {foo} is not available in this context. You can use the following: bar", str(context.exception) + "Placeholder {foo} is not available in this context. 
You can use the following: bar", + str(context.exception), ) def test_replace_placeholders_comparison(self): @@ -61,4 +65,7 @@ def test_assert_no_placeholders(self): expr = ast.Placeholder(field="foo") with self.assertRaises(HogQLException) as context: replace_placeholders(expr, None) - self.assertEqual("Placeholders, such as {foo}, are not supported in this context", str(context.exception)) + self.assertEqual( + "Placeholders, such as {foo}, are not supported in this context", + str(context.exception), + ) diff --git a/posthog/hogql/test/test_printer.py b/posthog/hogql/test/test_printer.py index 3861bd77fce42..75f182a618f54 100644 --- a/posthog/hogql/test/test_printer.py +++ b/posthog/hogql/test/test_printer.py @@ -23,13 +23,19 @@ class TestPrinter(BaseTest): # Helper to always translate HogQL with a blank context def _expr( - self, query: str, context: Optional[HogQLContext] = None, dialect: Literal["hogql", "clickhouse"] = "clickhouse" + self, + query: str, + context: Optional[HogQLContext] = None, + dialect: Literal["hogql", "clickhouse"] = "clickhouse", ) -> str: return translate_hogql(query, context or HogQLContext(team_id=self.team.pk), dialect) # Helper to always translate HogQL with a blank context, def _select( - self, query: str, context: Optional[HogQLContext] = None, placeholders: Optional[Dict[str, ast.Expr]] = None + self, + query: str, + context: Optional[HogQLContext] = None, + placeholders: Optional[Dict[str, ast.Expr]] = None, ) -> str: return print_ast( parse_select(query, placeholders=placeholders), @@ -37,7 +43,12 @@ def _select( "clickhouse", ) - def _assert_expr_error(self, expr, expected_error, dialect: Literal["hogql", "clickhouse"] = "clickhouse"): + def _assert_expr_error( + self, + expr, + expected_error, + dialect: Literal["hogql", "clickhouse"] = "clickhouse", + ): with self.assertRaises(HogQLException) as context: self._expr(expr, None, dialect) if expected_error not in str(context.exception): @@ -90,9 +101,13 @@ def 
test_tuples(self): self.assertEqual(self._expr("(1,2,[])"), "tuple(1, 2, [])") def test_lambdas(self): - self.assertEqual(self._expr("arrayMap(x -> x*2, [1,2,3])"), "arrayMap(x -> multiply(x, 2), [1, 2, 3])") self.assertEqual( - self._expr("arrayMap((x, y) -> x*y, [1,2,3])"), "arrayMap((x, y) -> multiply(x, y), [1, 2, 3])" + self._expr("arrayMap(x -> x*2, [1,2,3])"), + "arrayMap(x -> multiply(x, 2), [1, 2, 3])", + ) + self.assertEqual( + self._expr("arrayMap((x, y) -> x*y, [1,2,3])"), + "arrayMap((x, y) -> multiply(x, y), [1, 2, 3])", ) def test_equals_null(self): @@ -162,7 +177,11 @@ def test_hogql_properties(self): "person", ) self.assertEqual( - self._expr("person.properties.$browser", HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + "person.properties.$browser", + HogQLContext(team_id=self.team.pk), + "hogql", + ), "person.properties.$browser", ) self.assertEqual( @@ -170,23 +189,43 @@ def test_hogql_properties(self): "properties.$browser", ) self.assertEqual( - self._expr("properties.`$browser with a space`", HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + "properties.`$browser with a space`", + HogQLContext(team_id=self.team.pk), + "hogql", + ), "properties.`$browser with a space`", ) self.assertEqual( - self._expr('properties."$browser with a space"', HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + 'properties."$browser with a space"', + HogQLContext(team_id=self.team.pk), + "hogql", + ), "properties.`$browser with a space`", ) self.assertEqual( - self._expr("properties['$browser with a space']", HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + "properties['$browser with a space']", + HogQLContext(team_id=self.team.pk), + "hogql", + ), "properties.`$browser with a space`", ) self.assertEqual( - self._expr("properties['$browser with a ` tick']", HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + "properties['$browser with a ` tick']", + HogQLContext(team_id=self.team.pk), + "hogql", + ), 
"properties.`$browser with a \\` tick`", ) self.assertEqual( - self._expr("properties['$browser \\\\with a \\n` tick']", HogQLContext(team_id=self.team.pk), "hogql"), + self._expr( + "properties['$browser \\\\with a \\n` tick']", + HogQLContext(team_id=self.team.pk), + "hogql", + ), "properties.`$browser \\\\with a \\n\\` tick`", ) # "dot NUMBER" means "tuple access" in clickhouse. To access strings properties, wrap them in `backquotes` @@ -198,7 +237,11 @@ def test_hogql_properties(self): self._expr("properties.`1`", HogQLContext(team_id=self.team.pk), "hogql"), "properties.`1`", ) - self._assert_expr_error("properties.'no strings'", "no viable alternative at input '.'no strings'", "hogql") + self._assert_expr_error( + "properties.'no strings'", + "no viable alternative at input '.'no strings'", + "hogql", + ) def test_hogql_properties_json(self): context = HogQLContext(team_id=self.team.pk) @@ -206,7 +249,10 @@ def test_hogql_properties_json(self): self._expr("properties.nomat.json.yet", context), "replaceRegexpAll(nullIf(nullIf(JSONExtractRaw(events.properties, %(hogql_val_0)s, %(hogql_val_1)s, %(hogql_val_2)s), ''), 'null'), '^\"|\"$', '')", ) - self.assertEqual(context.values, {"hogql_val_0": "nomat", "hogql_val_1": "json", "hogql_val_2": "yet"}) + self.assertEqual( + context.values, + {"hogql_val_0": "nomat", "hogql_val_1": "json", "hogql_val_2": "yet"}, + ) def test_hogql_properties_materialized_json_access(self): try: @@ -232,11 +278,15 @@ def test_materialized_fields_and_properties(self): self.assertEqual(1 + 2, 3) return materialize("events", "$browser") - self.assertEqual(self._expr("properties['$browser']"), "nullIf(nullIf(events.`mat_$browser`, ''), 'null')") + self.assertEqual( + self._expr("properties['$browser']"), + "nullIf(nullIf(events.`mat_$browser`, ''), 'null')", + ) materialize("events", "withoutdollar") self.assertEqual( - self._expr("properties['withoutdollar']"), "nullIf(nullIf(events.mat_withoutdollar, ''), 'null')" + 
self._expr("properties['withoutdollar']"), + "nullIf(nullIf(events.mat_withoutdollar, ''), 'null')", ) materialize("events", "$browser and string") @@ -247,13 +297,17 @@ def test_materialized_fields_and_properties(self): materialize("events", "$browser%%%#@!@") self.assertEqual( - self._expr("properties['$browser%%%#@!@']"), "nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')" + self._expr("properties['$browser%%%#@!@']"), + "nullIf(nullIf(events.`mat_$browser_______`, ''), 'null')", ) def test_methods(self): self.assertEqual(self._expr("count()"), "count()") self.assertEqual(self._expr("count(distinct event)"), "count(DISTINCT events.event)") - self.assertEqual(self._expr("countIf(distinct event, 1 == 2)"), "countIf(DISTINCT events.event, 0)") + self.assertEqual( + self._expr("countIf(distinct event, 1 == 2)"), + "countIf(DISTINCT events.event, 0)", + ) self.assertEqual(self._expr("sumIf(1, 1 == 2)"), "sumIf(1, 0)") def test_functions(self): @@ -269,32 +323,49 @@ def test_expr_parse_errors(self): self._assert_expr_error("avg(bla)", "Unable to resolve field: bla") self._assert_expr_error("count(1,2,3,4)", "Aggregation 'count' expects at most 1 argument, found 4") self._assert_expr_error("countIf()", "Aggregation 'countIf' expects at least 1 argument, found 0") - self._assert_expr_error("countIf(2,3,4)", "Aggregation 'countIf' expects at most 2 arguments, found 3") + self._assert_expr_error( + "countIf(2,3,4)", + "Aggregation 'countIf' expects at most 2 arguments, found 3", + ) self._assert_expr_error("uniq()", "Aggregation 'uniq' expects at least 1 argument, found 0") self._assert_expr_error( - "quantile(event)", "Aggregation 'quantile' requires parameters in addition to arguments" + "quantile(event)", + "Aggregation 'quantile' requires parameters in addition to arguments", ) self._assert_expr_error( - "quantile()(event)", "Aggregation 'quantile' requires parameters in addition to arguments" + "quantile()(event)", + "Aggregation 'quantile' requires parameters 
in addition to arguments", + ) + self._assert_expr_error( + "quantile(0.5, 2)(event)", + "Aggregation 'quantile' expects 1 parameter, found 2", ) - self._assert_expr_error("quantile(0.5, 2)(event)", "Aggregation 'quantile' expects 1 parameter, found 2") self._assert_expr_error("sparkline()", "Function 'sparkline' expects 1 argument, found 0") self._assert_expr_error("hamburger(event)", "Unsupported function call 'hamburger(...)'") self._assert_expr_error("mad(event)", "Unsupported function call 'mad(...)'") - self._assert_expr_error("noway(event)", "Unsupported function call 'noway(...)'. Perhaps you meant 'now(...)'?") self._assert_expr_error( - "tostring(event)", "Unsupported function call 'tostring(...)'. Perhaps you meant 'toString(...)'?" + "noway(event)", + "Unsupported function call 'noway(...)'. Perhaps you meant 'now(...)'?", + ) + self._assert_expr_error( + "tostring(event)", + "Unsupported function call 'tostring(...)'. Perhaps you meant 'toString(...)'?", ) self._assert_expr_error("yeet.the.cloud", "Unable to resolve field: yeet") self._assert_expr_error("chipotle", "Unable to resolve field: chipotle") self._assert_expr_error( - "avg(avg(properties.bla))", "Aggregation 'avg' cannot be nested inside another aggregation 'avg'." + "avg(avg(properties.bla))", + "Aggregation 'avg' cannot be nested inside another aggregation 'avg'.", ) self._assert_expr_error("person.chipotle", "Field not found: chipotle") self._assert_expr_error("properties.0", "SQL indexes start from one, not from zero. E.g: array[1]") - self._assert_expr_error("properties.id.0", "SQL indexes start from one, not from zero. E.g: array[1]") self._assert_expr_error( - "event as `as%d`", 'The HogQL identifier "as%d" is not permitted as it contains the "%" character' + "properties.id.0", + "SQL indexes start from one, not from zero. 
E.g: array[1]", + ) + self._assert_expr_error( + "event as `as%d`", + 'The HogQL identifier "as%d" is not permitted as it contains the "%" character', ) @override_settings(PERSON_ON_EVENTS_OVERRIDE=True, PERSON_ON_EVENTS_V2_OVERRIDE=True) @@ -337,24 +408,55 @@ def test_logic(self): def test_comparisons(self): context = HogQLContext(team_id=self.team.pk) self.assertEqual(self._expr("event == 'E'", context), "equals(events.event, %(hogql_val_0)s)") - self.assertEqual(self._expr("event != 'E'", context), "notEquals(events.event, %(hogql_val_1)s)") + self.assertEqual( + self._expr("event != 'E'", context), + "notEquals(events.event, %(hogql_val_1)s)", + ) self.assertEqual(self._expr("event > 'E'", context), "greater(events.event, %(hogql_val_2)s)") - self.assertEqual(self._expr("event >= 'E'", context), "greaterOrEquals(events.event, %(hogql_val_3)s)") + self.assertEqual( + self._expr("event >= 'E'", context), + "greaterOrEquals(events.event, %(hogql_val_3)s)", + ) self.assertEqual(self._expr("event < 'E'", context), "less(events.event, %(hogql_val_4)s)") - self.assertEqual(self._expr("event <= 'E'", context), "lessOrEquals(events.event, %(hogql_val_5)s)") + self.assertEqual( + self._expr("event <= 'E'", context), + "lessOrEquals(events.event, %(hogql_val_5)s)", + ) self.assertEqual(self._expr("event like 'E'", context), "like(events.event, %(hogql_val_6)s)") - self.assertEqual(self._expr("event not like 'E'", context), "notLike(events.event, %(hogql_val_7)s)") - self.assertEqual(self._expr("event ilike 'E'", context), "ilike(events.event, %(hogql_val_8)s)") - self.assertEqual(self._expr("event not ilike 'E'", context), "notILike(events.event, %(hogql_val_9)s)") + self.assertEqual( + self._expr("event not like 'E'", context), + "notLike(events.event, %(hogql_val_7)s)", + ) + self.assertEqual( + self._expr("event ilike 'E'", context), + "ilike(events.event, %(hogql_val_8)s)", + ) + self.assertEqual( + self._expr("event not ilike 'E'", context), + "notILike(events.event, 
%(hogql_val_9)s)", + ) self.assertEqual(self._expr("event in 'E'", context), "in(events.event, %(hogql_val_10)s)") - self.assertEqual(self._expr("event not in 'E'", context), "notIn(events.event, %(hogql_val_11)s)") + self.assertEqual( + self._expr("event not in 'E'", context), + "notIn(events.event, %(hogql_val_11)s)", + ) self.assertEqual(self._expr("event ~ 'E'", context), "match(events.event, %(hogql_val_12)s)") self.assertEqual(self._expr("event =~ 'E'", context), "match(events.event, %(hogql_val_13)s)") - self.assertEqual(self._expr("event !~ 'E'", context), "not(match(events.event, %(hogql_val_14)s))") - self.assertEqual(self._expr("event ~* 'E'", context), "match(events.event, concat('(?i)', %(hogql_val_15)s))") - self.assertEqual(self._expr("event =~* 'E'", context), "match(events.event, concat('(?i)', %(hogql_val_16)s))") self.assertEqual( - self._expr("event !~* 'E'", context), "not(match(events.event, concat('(?i)', %(hogql_val_17)s)))" + self._expr("event !~ 'E'", context), + "not(match(events.event, %(hogql_val_14)s))", + ) + self.assertEqual( + self._expr("event ~* 'E'", context), + "match(events.event, concat('(?i)', %(hogql_val_15)s))", + ) + self.assertEqual( + self._expr("event =~* 'E'", context), + "match(events.event, concat('(?i)', %(hogql_val_16)s))", + ) + self.assertEqual( + self._expr("event !~* 'E'", context), + "not(match(events.event, concat('(?i)', %(hogql_val_17)s)))", ) def test_comments(self): @@ -369,31 +471,48 @@ def test_values(self): self._expr("coalesce(4.2, 5, 'lol', 'hoo')", context), "coalesce(4.2, 5, %(hogql_val_1)s, %(hogql_val_2)s)", ) - self.assertEqual(context.values, {"hogql_val_0": "E", "hogql_val_1": "lol", "hogql_val_2": "hoo"}) + self.assertEqual( + context.values, + {"hogql_val_0": "E", "hogql_val_1": "lol", "hogql_val_2": "hoo"}, + ) def test_alias_keywords(self): self._assert_expr_error( - "1 as team_id", '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword' + "1 as team_id", + '"team_id" 
cannot be an alias or identifier, as it\'s a reserved keyword', + ) + self._assert_expr_error( + "1 as true", + '"true" cannot be an alias or identifier, as it\'s a reserved keyword', ) - self._assert_expr_error("1 as true", '"true" cannot be an alias or identifier, as it\'s a reserved keyword') self._assert_select_error( - "select 1 as team_id from events", '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword' + "select 1 as team_id from events", + '"team_id" cannot be an alias or identifier, as it\'s a reserved keyword', ) self.assertEqual( self._select("select 1 as `-- select team_id` from events"), f"SELECT 1 AS `-- select team_id` FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000", ) # Some aliases are funny, but that's what the antlr syntax permits, and ClickHouse doesn't complain either - self.assertEqual(self._expr("event makes little sense"), "((events.event AS makes) AS little) AS sense") + self.assertEqual( + self._expr("event makes little sense"), + "((events.event AS makes) AS little) AS sense", + ) def test_case_when(self): self.assertEqual(self._expr("case when 1 then 2 else 3 end"), "if(1, 2, 3)") def test_case_when_many(self): - self.assertEqual(self._expr("case when 1 then 2 when 3 then 4 else 5 end"), "multiIf(1, 2, 3, 4, 5)") + self.assertEqual( + self._expr("case when 1 then 2 when 3 then 4 else 5 end"), + "multiIf(1, 2, 3, 4, 5)", + ) def test_case_when_case(self): - self.assertEqual(self._expr("case 0 when 1 then 2 when 3 then 4 else 5 end"), "transform(0, [1, 3], [2, 4], 5)") + self.assertEqual( + self._expr("case 0 when 1 then 2 when 3 then 4 else 5 end"), + "transform(0, [1, 3], [2, 4], 5)", + ) def test_select(self): self.assertEqual(self._select("select 1"), "SELECT 1 LIMIT 10000") @@ -421,19 +540,29 @@ def test_select_from(self): def test_select_from_placeholder(self): self.assertEqual( - self._select("select 1 from {placeholder}", placeholders={"placeholder": ast.Field(chain=["events"])}), - 
f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000", - ) - with self.assertRaises(HogQLException) as error_context: self._select( "select 1 from {placeholder}", - placeholders={ - "placeholder": ast.CompareOperation( - left=ast.Constant(value=1), right=ast.Constant(value=1), op=ast.CompareOperationOp.Eq - ) - }, + placeholders={"placeholder": ast.Field(chain=["events"])}, ), - self.assertEqual(str(error_context.exception), "JoinExpr with table of type CompareOperation not supported") + f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000", + ) + with self.assertRaises(HogQLException) as error_context: + ( + self._select( + "select 1 from {placeholder}", + placeholders={ + "placeholder": ast.CompareOperation( + left=ast.Constant(value=1), + right=ast.Constant(value=1), + op=ast.CompareOperationOp.Eq, + ) + }, + ), + ) + self.assertEqual( + str(error_context.exception), + "JoinExpr with table of type CompareOperation not supported", + ) def test_select_cross_join(self): self.assertEqual( @@ -702,13 +831,16 @@ def test_count_if_distinct(self): def test_print_timezone(self): context = HogQLContext( - team_id=self.team.pk, enable_select_queries=True, database=Database(None, WeekStartDay.SUNDAY) + team_id=self.team.pk, + enable_select_queries=True, + database=Database(None, WeekStartDay.SUNDAY), ) context.database.events.fields["test_date"] = DateDatabaseField(name="test_date") # type: ignore self.assertEqual( self._select( - "SELECT now(), toDateTime(timestamp), toDate(test_date), toDateTime('2020-02-02') FROM events", context + "SELECT now(), toDateTime(timestamp), toDate(test_date), toDateTime('2020-02-02') FROM events", + context, ), f"SELECT now64(6, %(hogql_val_0)s), toDateTime(toTimeZone(events.timestamp, %(hogql_val_1)s), %(hogql_val_2)s), toDate(events.test_date, %(hogql_val_3)s), parseDateTime64BestEffortOrNull(%(hogql_val_4)s, 6, %(hogql_val_5)s) FROM events WHERE equals(events.team_id, {self.team.pk}) 
LIMIT 10000", ) @@ -729,7 +861,10 @@ def test_print_timezone_custom(self): self.team.save() context = HogQLContext(team_id=self.team.pk, enable_select_queries=True) self.assertEqual( - self._select("SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", context), + self._select( + "SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", + context, + ), f"SELECT now64(6, %(hogql_val_0)s), toDateTime(toTimeZone(events.timestamp, %(hogql_val_1)s), %(hogql_val_2)s), parseDateTime64BestEffortOrNull(%(hogql_val_3)s, 6, %(hogql_val_4)s) FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000", ) self.assertEqual( @@ -749,7 +884,10 @@ def test_print_timezone_gibberish(self): context = HogQLContext(team_id=self.team.pk, enable_select_queries=True) with self.assertRaises(HogQLException) as error_context: - self._select("SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", context) + self._select( + "SELECT now(), toDateTime(timestamp), toDateTime('2020-02-02') FROM events", + context, + ) self.assertEqual(str(error_context.exception), "Unknown timezone: 'Europe/PostHogLandia'") def test_window_functions(self): @@ -907,7 +1045,11 @@ def test_print_global_settings(self): def test_print_query_level_settings(self): query = parse_select("SELECT 1 FROM events") query.settings = HogQLQuerySettings(optimize_aggregation_in_order=True) - printed = print_ast(query, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse") + printed = print_ast( + query, + HogQLContext(team_id=self.team.pk, enable_select_queries=True), + "clickhouse", + ) self.assertEqual( printed, f"SELECT 1 FROM events WHERE equals(events.team_id, {self.team.pk}) LIMIT 10000 SETTINGS optimize_aggregation_in_order=1", diff --git a/posthog/hogql/test/test_property.py b/posthog/hogql/test/test_property.py index 1e57589805645..c0ed528ea4da9 100644 --- a/posthog/hogql/test/test_property.py +++ 
b/posthog/hogql/test/test_property.py @@ -12,7 +12,14 @@ tag_name_to_expr, ) from posthog.hogql.visitor import clear_locations -from posthog.models import Action, ActionStep, Cohort, Property, PropertyDefinition, Team +from posthog.models import ( + Action, + ActionStep, + Cohort, + Property, + PropertyDefinition, + Team, +) from posthog.models.property import PropertyGroup from posthog.models.property_definition import PropertyType from posthog.schema import HogQLPropertyFilter, PropertyOperator @@ -49,8 +56,14 @@ def test_has_aggregation(self): def test_property_to_expr_hogql(self): self.assertEqual(self._property_to_expr({"type": "hogql", "key": "1"}), ast.Constant(value=1)) - self.assertEqual(self._property_to_expr(Property(type="hogql", key="1")), ast.Constant(value=1)) - self.assertEqual(self._property_to_expr(HogQLPropertyFilter(type="hogql", key="1")), ast.Constant(value=1)) + self.assertEqual( + self._property_to_expr(Property(type="hogql", key="1")), + ast.Constant(value=1), + ) + self.assertEqual( + self._property_to_expr(HogQLPropertyFilter(type="hogql", key="1")), + ast.Constant(value=1), + ) def test_property_to_expr_event(self): self.assertEqual( @@ -128,7 +141,10 @@ def test_property_to_expr_boolean(self): property_type=PropertyType.String, ) self.assertEqual( - self._property_to_expr({"type": "event", "key": "boolean_prop", "value": "true"}, team=self.team), + self._property_to_expr( + {"type": "event", "key": "boolean_prop", "value": "true"}, + team=self.team, + ), self._parse_expr("properties.boolean_prop = true"), ) self.assertEqual( @@ -136,7 +152,10 @@ def test_property_to_expr_boolean(self): self._parse_expr("properties.string_prop = 'true'"), ) self.assertEqual( - self._property_to_expr({"type": "event", "key": "unknown_prop", "value": "true"}, team=self.team), + self._property_to_expr( + {"type": "event", "key": "unknown_prop", "value": "true"}, + team=self.team, + ), self._parse_expr("properties.unknown_prop = true"), ) @@ -147,7 +166,14 
@@ def test_property_to_expr_event_list(self): self._parse_expr("properties.a = 'b' or properties.a = 'c'"), ) self.assertEqual( - self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "icontains"}), + self._property_to_expr( + { + "type": "event", + "key": "a", + "value": ["b", "c"], + "operator": "icontains", + } + ), self._parse_expr("properties.a ilike '%b%' or properties.a ilike '%c%'"), ) self.assertEqual( @@ -160,11 +186,25 @@ def test_property_to_expr_event_list(self): self._parse_expr("properties.a != 'b' and properties.a != 'c'"), ) self.assertEqual( - self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "not_icontains"}), + self._property_to_expr( + { + "type": "event", + "key": "a", + "value": ["b", "c"], + "operator": "not_icontains", + } + ), self._parse_expr("properties.a not ilike '%b%' and properties.a not ilike '%c%'"), ) self.assertEqual( - self._property_to_expr({"type": "event", "key": "a", "value": ["b", "c"], "operator": "not_regex"}), + self._property_to_expr( + { + "type": "event", + "key": "a", + "value": ["b", "c"], + "operator": "not_regex", + } + ), self._parse_expr("not(match(properties.a, 'b')) and not(match(properties.a, 'c'))"), ) @@ -182,27 +222,69 @@ def test_property_to_expr_person(self): def test_property_to_expr_element(self): self.assertEqual( - self._property_to_expr({"type": "element", "key": "selector", "value": "div", "operator": "exact"}), + self._property_to_expr( + { + "type": "element", + "key": "selector", + "value": "div", + "operator": "exact", + } + ), self._selector_to_expr("div"), ) self.assertEqual( - self._property_to_expr({"type": "element", "key": "selector", "value": "div", "operator": "is_not"}), + self._property_to_expr( + { + "type": "element", + "key": "selector", + "value": "div", + "operator": "is_not", + } + ), clear_locations(not_call(self._selector_to_expr("div"))), ) self.assertEqual( - self._property_to_expr({"type": "element", 
"key": "tag_name", "value": "div", "operator": "exact"}), + self._property_to_expr( + { + "type": "element", + "key": "tag_name", + "value": "div", + "operator": "exact", + } + ), clear_locations(tag_name_to_expr("div")), ) self.assertEqual( - self._property_to_expr({"type": "element", "key": "tag_name", "value": "div", "operator": "is_not"}), + self._property_to_expr( + { + "type": "element", + "key": "tag_name", + "value": "div", + "operator": "is_not", + } + ), clear_locations(not_call(tag_name_to_expr("div"))), ) self.assertEqual( - self._property_to_expr({"type": "element", "key": "href", "value": "href-text.", "operator": "exact"}), + self._property_to_expr( + { + "type": "element", + "key": "href", + "value": "href-text.", + "operator": "exact", + } + ), clear_locations(element_chain_key_filter("href", "href-text.", PropertyOperator.exact)), ) self.assertEqual( - self._property_to_expr({"type": "element", "key": "text", "value": "text-text.", "operator": "regex"}), + self._property_to_expr( + { + "type": "element", + "key": "text", + "value": "text-text.", + "operator": "regex", + } + ), clear_locations(element_chain_key_filter("text", "text-text.", PropertyOperator.regex)), ) @@ -259,7 +341,8 @@ def test_property_groups_single(self): self.assertEqual( self._property_to_expr( PropertyGroup( - type=PropertyOperatorType.OR, values=[Property(type="event", key="e", value="b", operator="exact")] + type=PropertyOperatorType.OR, + values=[Property(type="event", key="e", value="b", operator="exact")], ) ), self._parse_expr("properties.e = 'b'"), @@ -277,8 +360,18 @@ def test_property_groups_combined(self): PropertyGroup( type=PropertyOperatorType.OR, values=[ - Property(type="person", key="a", value="b", operator="exact"), - Property(type="event", key="e", value="b", operator="exact"), + Property( + type="person", + key="a", + value="b", + operator="exact", + ), + Property( + type="event", + key="e", + value="b", + operator="exact", + ), ], ), ], @@ -290,7 +383,8 @@ 
def test_property_groups_combined(self): def test_tag_name_to_expr(self): self.assertEqual( - clear_locations(tag_name_to_expr("a")), clear_locations(elements_chain_match("(^|;)a(\\.|$|;|:)")) + clear_locations(tag_name_to_expr("a")), + clear_locations(elements_chain_match("(^|;)a(\\.|$|;|:)")), ) def test_selector_to_expr(self): @@ -379,7 +473,12 @@ def test_elements_chain_key_filter(self): def test_action_to_expr(self): action1 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$autocapture", action=action1, selector="a.nav-link.active", tag_name="a") + ActionStep.objects.create( + event="$autocapture", + action=action1, + selector="a.nav-link.active", + tag_name="a", + ) self.assertEqual( clear_locations(action_to_expr(action1)), self._parse_expr( @@ -394,15 +493,30 @@ def test_action_to_expr(self): ) action2 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$pageview", action=action2, url="https://example.com", url_matching="contains") + ActionStep.objects.create( + event="$pageview", + action=action2, + url="https://example.com", + url_matching="contains", + ) self.assertEqual( clear_locations(action_to_expr(action2)), self._parse_expr("event = '$pageview' and properties.$current_url like '%https://example.com%'"), ) action3 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$pageview", action=action3, url="https://example2.com", url_matching="regex") - ActionStep.objects.create(event="custom", action=action3, url="https://example3.com", url_matching="exact") + ActionStep.objects.create( + event="$pageview", + action=action3, + url="https://example2.com", + url_matching="regex", + ) + ActionStep.objects.create( + event="custom", + action=action3, + url="https://example3.com", + url_matching="exact", + ) self.assertEqual( clear_locations(action_to_expr(action3)), self._parse_expr( @@ -435,7 +549,8 @@ def test_cohort_filter_static(self): def test_cohort_filter_dynamic(self): cohort = 
Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) self.assertEqual( self._property_to_expr({"type": "cohort", "key": "id", "value": cohort.pk}, self.team), @@ -444,15 +559,25 @@ def test_cohort_filter_dynamic(self): def test_person_scope(self): self.assertEqual( - self._property_to_expr({"type": "person", "key": "a", "value": "b", "operator": "exact"}, scope="event"), + self._property_to_expr( + {"type": "person", "key": "a", "value": "b", "operator": "exact"}, + scope="event", + ), self._parse_expr("person.properties.a = 'b'"), ) self.assertEqual( - self._property_to_expr({"type": "person", "key": "a", "value": "b", "operator": "exact"}, scope="person"), + self._property_to_expr( + {"type": "person", "key": "a", "value": "b", "operator": "exact"}, + scope="person", + ), self._parse_expr("properties.a = 'b'"), ) with self.assertRaises(Exception) as e: - self._property_to_expr({"type": "event", "key": "a", "value": "b", "operator": "exact"}, scope="person") + self._property_to_expr( + {"type": "event", "key": "a", "value": "b", "operator": "exact"}, + scope="person", + ) self.assertEqual( - str(e.exception), "The 'event' property filter only works in 'event' scope, not in 'person' scope" + str(e.exception), + "The 'event' property filter only works in 'event' scope, not in 'person' scope", ) diff --git a/posthog/hogql/test/test_query.py b/posthog/hogql/test/test_query.py index 475a346ff2b5e..0d6cf4342dead 100644 --- a/posthog/hogql/test/test_query.py +++ b/posthog/hogql/test/test_query.py @@ -15,9 +15,17 @@ from posthog.models import Cohort from posthog.models.cohort.util import recalculate_cohortpeople from posthog.models.utils import UUIDT -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from 
posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.schema import HogQLFilters, EventPropertyFilter, DateRange, QueryTiming -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, + flush_persons_and_events, +) from posthog.warehouse.models import DataWarehouseSavedQuery, DataWarehouseViewLink @@ -38,7 +46,11 @@ def _create_random_events(self) -> str: distinct_id="bla", event="random event", team=self.team, - properties={"random_prop": "don't include", "random_uuid": random_uuid, "index": index}, + properties={ + "random_prop": "don't include", + "random_uuid": random_uuid, + "index": index, + }, ) flush_persons_and_events() return random_uuid @@ -241,7 +253,10 @@ def test_query_joins_pdi_persons(self): ) assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot self.assertEqual(response.results[0][0], "bla") - self.assertEqual(response.results[0][1], datetime.datetime(2020, 1, 10, 0, 0, tzinfo=timezone.utc)) + self.assertEqual( + response.results[0][1], + datetime.datetime(2020, 1, 10, 0, 0, tzinfo=timezone.utc), + ) @pytest.mark.usefixtures("unittest_snapshot") def test_query_joins_pdi_person_properties(self): @@ -402,20 +417,42 @@ def test_query_select_person_with_poe_without_joins(self): def test_prop_cohort_basic(self): with freeze_time("2020-01-10"): - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": "something", "$another_prop": "something"}, ) _create_person(distinct_ids=["no_match"], team_id=self.team.pk) - _create_event(event="$pageview", 
team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) _create_event( - event="$pageview", team=self.team, distinct_id="some_other_id", properties={"attr": "some_val"} + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_other_id", + properties={"attr": "some_val"}, ) cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "$some_prop", "value": "something", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "something", + "type": "person", + } + ] + } + ], name="cohort", ) recalculate_cohortpeople(cohort, pending_version=0) @@ -425,7 +462,8 @@ def test_prop_cohort_basic(self): team=self.team, placeholders={ "cohort_filter": property_to_expr( - {"type": "cohort", "key": "id", "value": cohort.pk}, self.team + {"type": "cohort", "key": "id", "value": cohort.pk}, + self.team, ) }, ) @@ -441,7 +479,8 @@ def test_prop_cohort_basic(self): team=self.team, placeholders={ "cohort_filter": property_to_expr( - {"type": "cohort", "key": "id", "value": cohort.pk}, self.team + {"type": "cohort", "key": "id", "value": cohort.pk}, + self.team, ) }, ) @@ -456,16 +495,28 @@ def test_prop_cohort_basic(self): def test_prop_cohort_static(self): with freeze_time("2020-01-10"): - _create_person(distinct_ids=["some_other_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_other_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_person( distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": "something", "$another_prop": "something"}, ) _create_person(distinct_ids=["no_match"], team_id=self.team.pk) - _create_event(event="$pageview", team=self.team, distinct_id="some_id", properties={"attr": "some_val"}) _create_event( - event="$pageview", team=self.team, 
distinct_id="some_other_id", properties={"attr": "some_val"} + event="$pageview", + team=self.team, + distinct_id="some_id", + properties={"attr": "some_val"}, + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id="some_other_id", + properties={"attr": "some_val"}, ) cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True) cohort.insert_users_by_list(["some_id"]) @@ -476,7 +527,8 @@ def test_prop_cohort_static(self): team=self.team, placeholders={ "cohort_filter": property_to_expr( - {"type": "cohort", "key": "id", "value": cohort.pk}, self.team + {"type": "cohort", "key": "id", "value": cohort.pk}, + self.team, ) }, ) @@ -493,7 +545,8 @@ def test_prop_cohort_static(self): team=self.team, placeholders={ "cohort_filter": property_to_expr( - {"type": "cohort", "key": "id", "value": cohort.pk}, self.team + {"type": "cohort", "key": "id", "value": cohort.pk}, + self.team, ) }, ) @@ -505,7 +558,11 @@ def test_prop_cohort_static(self): def test_join_with_property_materialized_session_id(self): with freeze_time("2020-01-10"): - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": "something"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( event="$pageview", team=self.team, @@ -519,7 +576,10 @@ def test_join_with_property_materialized_session_id(self): properties={"attr": "some_val", "$session_id": "111"}, ) produce_replay_summary( - distinct_id="some_id", session_id="111", first_timestamp=timezone.now(), team_id=self.team.pk + distinct_id="some_id", + session_id="111", + first_timestamp=timezone.now(), + team_id=self.team.pk, ) response = execute_hogql_query( @@ -544,7 +604,11 @@ def test_join_with_property_materialized_session_id(self): def test_join_with_property_not_materialized(self): with freeze_time("2020-01-10"): - _create_person(distinct_ids=["some_id"], team_id=self.team.pk, properties={"$some_prop": 
"something"}) + _create_person( + distinct_ids=["some_id"], + team_id=self.team.pk, + properties={"$some_prop": "something"}, + ) _create_event( event="$pageview", team=self.team, @@ -558,7 +622,10 @@ def test_join_with_property_not_materialized(self): properties={"attr": "some_val", "$$$session_id": "111"}, ) produce_replay_summary( - distinct_id="some_id", session_id="111", first_timestamp=timezone.now(), team_id=self.team.pk + distinct_id="some_id", + session_id="111", + first_timestamp=timezone.now(), + team_id=self.team.pk, ) response = execute_hogql_query( @@ -625,7 +692,10 @@ def test_tuple_access(self): query, team=self.team, ) - self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])]) + self.assertEqual( + response.results, + [("0", [("random event", 1)]), ("1", [("random event", 1)])], + ) assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot def test_null_properties(self): @@ -635,7 +705,12 @@ def test_null_properties(self): distinct_id="bla", event="empty event", team=self.team, - properties={"empty_string": "", "null": None, "str_zero": "0", "num_zero": 0}, + properties={ + "empty_string": "", + "null": None, + "str_zero": "0", + "num_zero": 0, + }, ) query = """ @@ -881,7 +956,10 @@ def test_with_pivot_table_1_level(self): query, team=self.team, ) - self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])]) + self.assertEqual( + response.results, + [("0", [("random event", 1)]), ("1", [("random event", 1)])], + ) assert pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot @pytest.mark.usefixtures("unittest_snapshot") @@ -917,7 +995,10 @@ def test_with_pivot_table_2_levels(self): query, team=self.team, ) - self.assertEqual(response.results, [("0", [("random event", 1)]), ("1", [("random event", 1)])]) + self.assertEqual( + response.results, + [("0", [("random event", 1)]), ("1", [("random event", 1)])], + ) assert 
pretty_print_in_tests(response.clickhouse, self.team.pk) == self.snapshot def test_property_access_with_arrays(self): @@ -1308,7 +1389,11 @@ def test_view_link(self): saved_query = DataWarehouseSavedQuery.objects.get(pk=saved_query_response["id"]) DataWarehouseViewLink.objects.create( - saved_query=saved_query, table="events", to_join_key="fake", from_join_key="distinct_id", team=self.team + saved_query=saved_query, + table="events", + to_join_key="fake", + from_join_key="distinct_id", + team=self.team, ) response = execute_hogql_query("SELECT event_view.fake FROM events", team=self.team) @@ -1366,7 +1451,10 @@ def test_hogql_query_filters_double_error(self): query = "SELECT event from events where {filters}" with self.assertRaises(HogQLException) as e: execute_hogql_query( - query, team=self.team, filters=HogQLFilters(), placeholders={"filters": ast.Constant(value=True)} + query, + team=self.team, + filters=HogQLFilters(), + placeholders={"filters": ast.Constant(value=True)}, ) self.assertEqual( str(e.exception), @@ -1378,7 +1466,14 @@ def test_hogql_query_filters_alias(self): random_uuid = self._create_random_events() query = "SELECT event, distinct_id from events e WHERE {filters}" filters = HogQLFilters( - properties=[EventPropertyFilter(key="random_uuid", operator="exact", value=random_uuid, type="event")] + properties=[ + EventPropertyFilter( + key="random_uuid", + operator="exact", + value=random_uuid, + type="event", + ) + ] ) response = execute_hogql_query(query, team=self.team, filters=filters) self.assertEqual( diff --git a/posthog/hogql/test/test_resolver.py b/posthog/hogql/test/test_resolver.py index 7ed33e37291d2..1a946c2c3e769 100644 --- a/posthog/hogql/test/test_resolver.py +++ b/posthog/hogql/test/test_resolver.py @@ -27,11 +27,18 @@ class TestResolver(BaseTest): maxDiff = None def _select(self, query: str, placeholders: Optional[Dict[str, ast.Expr]] = None) -> ast.SelectQuery: - return cast(ast.SelectQuery, clone_expr(parse_select(query, 
placeholders=placeholders), clear_locations=True)) + return cast( + ast.SelectQuery, + clone_expr(parse_select(query, placeholders=placeholders), clear_locations=True), + ) def _print_hogql(self, select: str): expr = self._select(select) - return print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "hogql") + return print_ast( + expr, + HogQLContext(team_id=self.team.pk, enable_select_queries=True), + "hogql", + ) def setUp(self): self.database = create_hogql_database(self.team.pk) @@ -80,7 +87,8 @@ def test_will_not_run_twice(self): with self.assertRaises(ResolverException) as context: expr = resolve_types(expr, self.context) self.assertEqual( - str(context.exception), "Type already resolved for SelectQuery (SelectQueryType). Can't run again." + str(context.exception), + "Type already resolved for SelectQuery (SelectQueryType). Can't run again.", ) def test_resolve_events_table_alias(self): @@ -134,11 +142,17 @@ def test_resolve_events_table_column_alias(self): select_query_type = ast.SelectQueryType( aliases={ "ee": ast.FieldAliasType(alias="ee", type=event_field_type), - "e": ast.FieldAliasType(alias="e", type=ast.FieldAliasType(alias="ee", type=event_field_type)), + "e": ast.FieldAliasType( + alias="e", + type=ast.FieldAliasType(alias="ee", type=event_field_type), + ), }, columns={ "ee": ast.FieldAliasType(alias="ee", type=event_field_type), - "e": ast.FieldAliasType(alias="e", type=ast.FieldAliasType(alias="ee", type=event_field_type)), + "e": ast.FieldAliasType( + alias="e", + type=ast.FieldAliasType(alias="ee", type=event_field_type), + ), "timestamp": timestamp_field_type, }, tables={"e": events_table_alias_type}, @@ -184,7 +198,8 @@ def test_resolve_events_table_column_alias_inside_subquery(self): expr = resolve_types(expr, self.context) inner_events_table_type = ast.TableType(table=self.database.events) inner_event_field_type = ast.FieldAliasType( - alias="b", type=ast.FieldType(name="event", table_type=inner_events_table_type) 
+ alias="b", + type=ast.FieldType(name="event", table_type=inner_events_table_type), ) timestamp_field_type = ast.FieldType(name="timestamp", table_type=inner_events_table_type) timstamp_alias_type = ast.FieldAliasType(alias="c", type=timestamp_field_type) @@ -288,13 +303,25 @@ def test_resolve_constant_type(self): ast.Constant(value=1.1232, type=ast.FloatType()), ast.Constant(value=None, type=ast.UnknownType()), ast.Constant(value=date(2020, 1, 10), type=ast.DateType()), - ast.Constant(value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc), type=ast.DateTimeType()), - ast.Constant(value=UUID("00000000-0000-4000-8000-000000000000"), type=ast.UUIDType()), + ast.Constant( + value=datetime(2020, 1, 10, 0, 0, 0, tzinfo=timezone.utc), + type=ast.DateTimeType(), + ), + ast.Constant( + value=UUID("00000000-0000-4000-8000-000000000000"), + type=ast.UUIDType(), + ), ast.Constant(value=[], type=ast.ArrayType(item_type=ast.UnknownType())), ast.Constant(value=[1, 2], type=ast.ArrayType(item_type=ast.IntegerType())), ast.Constant( value=(1, 2, 3), - type=ast.TupleType(item_types=[ast.IntegerType(), ast.IntegerType(), ast.IntegerType()]), + type=ast.TupleType( + item_types=[ + ast.IntegerType(), + ast.IntegerType(), + ast.IntegerType(), + ] + ), ), ], type=ast.SelectQueryType(aliases={}, columns={}, tables={}), @@ -634,7 +661,9 @@ def test_resolve_virtual_events_poe(self): type=ast.FieldType( name="id", table_type=ast.VirtualTableType( - table_type=events_table_type, field="poe", virtual_table=self.database.events.fields["poe"] + table_type=events_table_type, + field="poe", + virtual_table=self.database.events.fields["poe"], ), ), ), @@ -674,15 +703,27 @@ def test_resolve_union_all(self): self.assertEqual( node.select_queries[0].select, [ - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)), - ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)), + ast.Field( + chain=["event"], + 
type=ast.FieldType(name="event", table_type=events_table_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=events_table_type), + ), ], ) self.assertEqual( node.select_queries[1].select, [ - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)), - ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)), + ast.Field( + chain=["event"], + type=ast.FieldType(name="event", table_type=events_table_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=events_table_type), + ), ], ) @@ -693,11 +734,18 @@ def test_call_type(self): ast.Call( name="max", # NB! timestamp was resolved to a DateTimeType for the Call's arg type. - type=ast.CallType(name="max", arg_types=[ast.DateTimeType()], return_type=ast.UnknownType()), + type=ast.CallType( + name="max", + arg_types=[ast.DateTimeType()], + return_type=ast.UnknownType(), + ), args=[ ast.Field( chain=["timestamp"], - type=ast.FieldType(name="timestamp", table_type=ast.TableType(table=self.database.events)), + type=ast.FieldType( + name="timestamp", + table_type=ast.TableType(table=self.database.events), + ), ) ], ), @@ -776,21 +824,58 @@ def test_asterisk_expander_table(self): self.assertEqual( node.select, [ - ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=events_table_type)), - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_type)), - ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=events_table_type)), - ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_type)), - ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=events_table_type)), - ast.Field( - chain=["elements_chain"], type=ast.FieldType(name="elements_chain", table_type=events_table_type) - ), - ast.Field(chain=["created_at"], 
type=ast.FieldType(name="created_at", table_type=events_table_type)), - ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=events_table_type)), - ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=events_table_type)), - ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=events_table_type)), - ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=events_table_type)), - ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=events_table_type)), - ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=events_table_type)), + ast.Field( + chain=["uuid"], + type=ast.FieldType(name="uuid", table_type=events_table_type), + ), + ast.Field( + chain=["event"], + type=ast.FieldType(name="event", table_type=events_table_type), + ), + ast.Field( + chain=["properties"], + type=ast.FieldType(name="properties", table_type=events_table_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=events_table_type), + ), + ast.Field( + chain=["distinct_id"], + type=ast.FieldType(name="distinct_id", table_type=events_table_type), + ), + ast.Field( + chain=["elements_chain"], + type=ast.FieldType(name="elements_chain", table_type=events_table_type), + ), + ast.Field( + chain=["created_at"], + type=ast.FieldType(name="created_at", table_type=events_table_type), + ), + ast.Field( + chain=["$session_id"], + type=ast.FieldType(name="$session_id", table_type=events_table_type), + ), + ast.Field( + chain=["$group_0"], + type=ast.FieldType(name="$group_0", table_type=events_table_type), + ), + ast.Field( + chain=["$group_1"], + type=ast.FieldType(name="$group_1", table_type=events_table_type), + ), + ast.Field( + chain=["$group_2"], + type=ast.FieldType(name="$group_2", table_type=events_table_type), + ), + ast.Field( + chain=["$group_3"], + type=ast.FieldType(name="$group_3", table_type=events_table_type), 
+ ), + ast.Field( + chain=["$group_4"], + type=ast.FieldType(name="$group_4", table_type=events_table_type), + ), ], ) @@ -805,32 +890,58 @@ def test_asterisk_expander_table_alias(self): self.assertEqual( node.select, [ - ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=events_table_alias_type)), - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=events_table_alias_type)), ast.Field( - chain=["properties"], type=ast.FieldType(name="properties", table_type=events_table_alias_type) + chain=["uuid"], + type=ast.FieldType(name="uuid", table_type=events_table_alias_type), ), ast.Field( - chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=events_table_alias_type) + chain=["event"], + type=ast.FieldType(name="event", table_type=events_table_alias_type), ), ast.Field( - chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=events_table_alias_type) + chain=["properties"], + type=ast.FieldType(name="properties", table_type=events_table_alias_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=events_table_alias_type), + ), + ast.Field( + chain=["distinct_id"], + type=ast.FieldType(name="distinct_id", table_type=events_table_alias_type), ), ast.Field( chain=["elements_chain"], type=ast.FieldType(name="elements_chain", table_type=events_table_alias_type), ), ast.Field( - chain=["created_at"], type=ast.FieldType(name="created_at", table_type=events_table_alias_type) + chain=["created_at"], + type=ast.FieldType(name="created_at", table_type=events_table_alias_type), + ), + ast.Field( + chain=["$session_id"], + type=ast.FieldType(name="$session_id", table_type=events_table_alias_type), + ), + ast.Field( + chain=["$group_0"], + type=ast.FieldType(name="$group_0", table_type=events_table_alias_type), ), ast.Field( - chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=events_table_alias_type) + chain=["$group_1"], + 
type=ast.FieldType(name="$group_1", table_type=events_table_alias_type), + ), + ast.Field( + chain=["$group_2"], + type=ast.FieldType(name="$group_2", table_type=events_table_alias_type), + ), + ast.Field( + chain=["$group_3"], + type=ast.FieldType(name="$group_3", table_type=events_table_alias_type), + ), + ast.Field( + chain=["$group_4"], + type=ast.FieldType(name="$group_4", table_type=events_table_alias_type), ), - ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=events_table_alias_type)), - ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=events_table_alias_type)), - ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=events_table_alias_type)), - ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=events_table_alias_type)), - ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=events_table_alias_type)), ], ) @@ -852,8 +963,14 @@ def test_asterisk_expander_subquery(self): self.assertEqual( node.select, [ - ast.Field(chain=["a"], type=ast.FieldType(name="a", table_type=select_subquery_type)), - ast.Field(chain=["b"], type=ast.FieldType(name="b", table_type=select_subquery_type)), + ast.Field( + chain=["a"], + type=ast.FieldType(name="a", table_type=select_subquery_type), + ), + ast.Field( + chain=["b"], + type=ast.FieldType(name="b", table_type=select_subquery_type), + ), ], ) @@ -878,8 +995,14 @@ def test_asterisk_expander_subquery_alias(self): self.assertEqual( node.select, [ - ast.Field(chain=["a"], type=ast.FieldType(name="a", table_type=select_subquery_type)), - ast.Field(chain=["b"], type=ast.FieldType(name="b", table_type=select_subquery_type)), + ast.Field( + chain=["a"], + type=ast.FieldType(name="a", table_type=select_subquery_type), + ), + ast.Field( + chain=["b"], + type=ast.FieldType(name="b", table_type=select_subquery_type), + ), ], ) @@ -914,22 +1037,58 @@ def test_asterisk_expander_from_subquery_table(self): 
self.assertEqual( node.select, [ - ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=inner_select_type)), - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=inner_select_type)), - ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=inner_select_type)), - ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=inner_select_type)), - ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=inner_select_type)), + ast.Field( + chain=["uuid"], + type=ast.FieldType(name="uuid", table_type=inner_select_type), + ), + ast.Field( + chain=["event"], + type=ast.FieldType(name="event", table_type=inner_select_type), + ), + ast.Field( + chain=["properties"], + type=ast.FieldType(name="properties", table_type=inner_select_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=inner_select_type), + ), + ast.Field( + chain=["distinct_id"], + type=ast.FieldType(name="distinct_id", table_type=inner_select_type), + ), ast.Field( chain=["elements_chain"], type=ast.FieldType(name="elements_chain", table_type=inner_select_type), ), - ast.Field(chain=["created_at"], type=ast.FieldType(name="created_at", table_type=inner_select_type)), - ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=inner_select_type)), - ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=inner_select_type)), - ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=inner_select_type)), - ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=inner_select_type)), - ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=inner_select_type)), - ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=inner_select_type)), + ast.Field( + chain=["created_at"], + type=ast.FieldType(name="created_at", 
table_type=inner_select_type), + ), + ast.Field( + chain=["$session_id"], + type=ast.FieldType(name="$session_id", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_0"], + type=ast.FieldType(name="$group_0", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_1"], + type=ast.FieldType(name="$group_1", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_2"], + type=ast.FieldType(name="$group_2", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_3"], + type=ast.FieldType(name="$group_3", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_4"], + type=ast.FieldType(name="$group_4", table_type=inner_select_type), + ), ], ) @@ -938,7 +1097,8 @@ def test_asterisk_expander_multiple_table_error(self): with self.assertRaises(ResolverException) as e: resolve_types(node, self.context) self.assertEqual( - str(e.exception), "Cannot use '*' without table name when there are multiple tables in the query" + str(e.exception), + "Cannot use '*' without table name when there are multiple tables in the query", ) @override_settings(PERSON_ON_EVENTS_OVERRIDE=False, PERSON_ON_EVENTS_V2_OVERRIDE=False) @@ -977,22 +1137,58 @@ def test_asterisk_expander_select_union(self): self.assertEqual( node.select, [ - ast.Field(chain=["uuid"], type=ast.FieldType(name="uuid", table_type=inner_select_type)), - ast.Field(chain=["event"], type=ast.FieldType(name="event", table_type=inner_select_type)), - ast.Field(chain=["properties"], type=ast.FieldType(name="properties", table_type=inner_select_type)), - ast.Field(chain=["timestamp"], type=ast.FieldType(name="timestamp", table_type=inner_select_type)), - ast.Field(chain=["distinct_id"], type=ast.FieldType(name="distinct_id", table_type=inner_select_type)), + ast.Field( + chain=["uuid"], + type=ast.FieldType(name="uuid", table_type=inner_select_type), + ), + ast.Field( + chain=["event"], + type=ast.FieldType(name="event", table_type=inner_select_type), + ), + ast.Field( + 
chain=["properties"], + type=ast.FieldType(name="properties", table_type=inner_select_type), + ), + ast.Field( + chain=["timestamp"], + type=ast.FieldType(name="timestamp", table_type=inner_select_type), + ), + ast.Field( + chain=["distinct_id"], + type=ast.FieldType(name="distinct_id", table_type=inner_select_type), + ), ast.Field( chain=["elements_chain"], type=ast.FieldType(name="elements_chain", table_type=inner_select_type), ), - ast.Field(chain=["created_at"], type=ast.FieldType(name="created_at", table_type=inner_select_type)), - ast.Field(chain=["$session_id"], type=ast.FieldType(name="$session_id", table_type=inner_select_type)), - ast.Field(chain=["$group_0"], type=ast.FieldType(name="$group_0", table_type=inner_select_type)), - ast.Field(chain=["$group_1"], type=ast.FieldType(name="$group_1", table_type=inner_select_type)), - ast.Field(chain=["$group_2"], type=ast.FieldType(name="$group_2", table_type=inner_select_type)), - ast.Field(chain=["$group_3"], type=ast.FieldType(name="$group_3", table_type=inner_select_type)), - ast.Field(chain=["$group_4"], type=ast.FieldType(name="$group_4", table_type=inner_select_type)), + ast.Field( + chain=["created_at"], + type=ast.FieldType(name="created_at", table_type=inner_select_type), + ), + ast.Field( + chain=["$session_id"], + type=ast.FieldType(name="$session_id", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_0"], + type=ast.FieldType(name="$group_0", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_1"], + type=ast.FieldType(name="$group_1", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_2"], + type=ast.FieldType(name="$group_2", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_3"], + type=ast.FieldType(name="$group_3", table_type=inner_select_type), + ), + ast.Field( + chain=["$group_4"], + type=ast.FieldType(name="$group_4", table_type=inner_select_type), + ), ], ) diff --git a/posthog/hogql/test/test_visitor.py 
b/posthog/hogql/test/test_visitor.py index d946af02073dd..78b2d6dc42536 100644 --- a/posthog/hogql/test/test_visitor.py +++ b/posthog/hogql/test/test_visitor.py @@ -66,7 +66,10 @@ def test_everything_visitor(self): ], ) ), - ast.Alias(expr=ast.SelectQuery(select=[ast.Field(chain=["timestamp"])]), alias="f"), + ast.Alias( + expr=ast.SelectQuery(select=[ast.Field(chain=["timestamp"])]), + alias="f", + ), ast.SelectQuery( select=[ast.Field(chain=["a"])], select_from=ast.JoinExpr( diff --git a/posthog/hogql/transforms/in_cohort.py b/posthog/hogql/transforms/in_cohort.py index aa1fe0e3a23ee..670d0a8e73c2a 100644 --- a/posthog/hogql/transforms/in_cohort.py +++ b/posthog/hogql/transforms/in_cohort.py @@ -9,12 +9,20 @@ from posthog.hogql.visitor import TraversingVisitor, clone_expr -def resolve_in_cohorts(node: ast.Expr, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None): +def resolve_in_cohorts( + node: ast.Expr, + stack: Optional[List[ast.SelectQuery]] = None, + context: HogQLContext = None, +): InCohortResolver(stack=stack, context=context).visit(node) class InCohortResolver(TraversingVisitor): - def __init__(self, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None): + def __init__( + self, + stack: Optional[List[ast.SelectQuery]] = None, + context: HogQLContext = None, + ): super().__init__() self.stack: List[ast.SelectQuery] = stack or [] self.context = context @@ -80,7 +88,12 @@ def visit_compare_operation(self, node: ast.CompareOperation): self.visit(node.right) def _add_join_for_cohort( - self, cohort_id: int, is_static: bool, select: ast.SelectQuery, compare: ast.CompareOperation, negative: bool + self, + cohort_id: int, + is_static: bool, + select: ast.SelectQuery, + compare: ast.CompareOperation, + negative: bool, ): must_add_join = True last_join = select.select_from @@ -115,9 +128,14 @@ def _add_join_for_cohort( ) ), ) - new_join = cast(ast.JoinExpr, resolve_types(new_join, self.context, 
[self.stack[-1].type])) + new_join = cast( + ast.JoinExpr, + resolve_types(new_join, self.context, [self.stack[-1].type]), + ) new_join.constraint.expr.left = resolve_types( - ast.Field(chain=[f"in_cohort__{cohort_id}", "person_id"]), self.context, [self.stack[-1].type] + ast.Field(chain=[f"in_cohort__{cohort_id}", "person_id"]), + self.context, + [self.stack[-1].type], ) new_join.constraint.expr.right = clone_expr(compare.left) if last_join: @@ -127,6 +145,8 @@ def _add_join_for_cohort( compare.op = ast.CompareOperationOp.NotEq if negative else ast.CompareOperationOp.Eq compare.left = resolve_types( - ast.Field(chain=[f"in_cohort__{cohort_id}", "matched"]), self.context, [self.stack[-1].type] + ast.Field(chain=[f"in_cohort__{cohort_id}", "matched"]), + self.context, + [self.stack[-1].type], ) compare.right = resolve_types(ast.Constant(value=1), self.context, [self.stack[-1].type]) diff --git a/posthog/hogql/transforms/lazy_tables.py b/posthog/hogql/transforms/lazy_tables.py index d2bd4c1398aa9..48018cd789264 100644 --- a/posthog/hogql/transforms/lazy_tables.py +++ b/posthog/hogql/transforms/lazy_tables.py @@ -6,10 +6,15 @@ from posthog.hogql.database.models import LazyJoin, LazyTable from posthog.hogql.errors import HogQLException from posthog.hogql.resolver import resolve_types +from posthog.hogql.resolver_utils import get_long_table_name from posthog.hogql.visitor import TraversingVisitor -def resolve_lazy_tables(node: ast.Expr, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None): +def resolve_lazy_tables( + node: ast.Expr, + stack: Optional[List[ast.SelectQuery]] = None, + context: HogQLContext = None, +): LazyTableResolver(stack=stack, context=context).visit(node) @@ -28,27 +33,15 @@ class TableToAdd: class LazyTableResolver(TraversingVisitor): - def __init__(self, stack: Optional[List[ast.SelectQuery]] = None, context: HogQLContext = None): + def __init__( + self, + stack: Optional[List[ast.SelectQuery]] = None, + context: 
HogQLContext = None, + ): super().__init__() self.stack_of_fields: List[List[ast.FieldType | ast.PropertyType]] = [[]] if stack else [] self.context = context - def _get_long_table_name(self, select: ast.SelectQueryType, type: ast.BaseTableType) -> str: - if isinstance(type, ast.TableType): - return select.get_alias_for_table_type(type) - elif isinstance(type, ast.LazyTableType): - return type.table.to_printed_hogql() - elif isinstance(type, ast.TableAliasType): - return type.alias - elif isinstance(type, ast.SelectQueryAliasType): - return type.alias - elif isinstance(type, ast.LazyJoinType): - return f"{self._get_long_table_name(select, type.table_type)}__{type.field}" - elif isinstance(type, ast.VirtualTableType): - return f"{self._get_long_table_name(select, type.table_type)}__{type.field}" - else: - raise HogQLException(f"Unknown table type in LazyTableResolver: {type.__class__.__name__}") - def visit_property_type(self, node: ast.PropertyType): if node.joined_subquery is not None: # we have already visited this property @@ -110,7 +103,7 @@ def visit_select_query(self, node: ast.SelectQuery): if field_or_property.field_type.table_type == join.table.type: fields.append(field_or_property) if len(fields) == 0: - table_name = join.alias or self._get_long_table_name(select_type, join.table.type) + table_name = join.alias or get_long_table_name(select_type, join.table.type) tables_to_add[table_name] = TableToAdd(fields_accessed={}, lazy_table=join.table.type.table) join = join.next_join @@ -139,8 +132,8 @@ def visit_select_query(self, node: ast.SelectQuery): # Loop over the collected lazy tables in reverse order to create the joins for table_type in reversed(table_types): if isinstance(table_type, ast.LazyJoinType): - from_table = self._get_long_table_name(select_type, table_type.table_type) - to_table = self._get_long_table_name(select_type, table_type) + from_table = get_long_table_name(select_type, table_type.table_type) + to_table = 
get_long_table_name(select_type, table_type) if to_table not in joins_to_add: joins_to_add[to_table] = JoinToAdd( fields_accessed={}, # collect here all fields accessed on this table @@ -159,7 +152,7 @@ def visit_select_query(self, node: ast.SelectQuery): else: new_join.fields_accessed[field.name] = chain elif isinstance(table_type, ast.LazyTableType): - table_name = self._get_long_table_name(select_type, table_type) + table_name = get_long_table_name(select_type, table_type) if table_name not in tables_to_add: tables_to_add[table_name] = TableToAdd( fields_accessed={}, # collect here all fields accessed on this table @@ -203,9 +196,14 @@ def visit_select_query(self, node: ast.SelectQuery): # For all the collected joins, create the join subqueries, and add them to the table. for to_table, join_scope in joins_to_add.items(): join_to_add: ast.JoinExpr = join_scope.lazy_join.join_function( - join_scope.from_table, join_scope.to_table, join_scope.fields_accessed, self.context.modifiers + join_scope.from_table, + join_scope.to_table, + join_scope.fields_accessed, + self.context, + node, ) join_to_add = cast(ast.JoinExpr, resolve_types(join_to_add, self.context, [node.type])) + select_type.tables[to_table] = join_to_add.type join_ptr = node.select_from @@ -239,7 +237,7 @@ def visit_select_query(self, node: ast.SelectQuery): else: raise HogQLException("Should not be reachable") - table_name = self._get_long_table_name(select_type, table_type) + table_name = get_long_table_name(select_type, table_type) table_type = select_type.tables[table_name] if isinstance(field_or_property, ast.FieldType): diff --git a/posthog/hogql/transforms/property_types.py b/posthog/hogql/transforms/property_types.py index be46d24873a91..a2fe60c9aaacd 100644 --- a/posthog/hogql/transforms/property_types.py +++ b/posthog/hogql/transforms/property_types.py @@ -46,7 +46,10 @@ def resolve_property_types(node: ast.Expr, context: HogQLContext = None) -> ast. 
timezone = context.database.get_timezone() if context and context.database else "UTC" property_swapper = PropertySwapper( - timezone=timezone, event_properties=event_properties, person_properties=person_properties, context=context + timezone=timezone, + event_properties=event_properties, + person_properties=person_properties, + context=context, ) return property_swapper.visit(node) @@ -83,7 +86,11 @@ def visit_field(self, node: ast.Field): class PropertySwapper(CloningVisitor): def __init__( - self, timezone: str, event_properties: Dict[str, str], person_properties: Dict[str, str], context: HogQLContext + self, + timezone: str, + event_properties: Dict[str, str], + person_properties: Dict[str, str], + context: HogQLContext, ): super().__init__(clear_types=False) self.timezone = timezone @@ -98,7 +105,9 @@ def visit_field(self, node: ast.Field): name="toTimeZone", args=[node, ast.Constant(value=self.timezone)], type=ast.CallType( - name="toTimeZone", arg_types=[ast.DateTimeType()], return_type=ast.DateTimeType() + name="toTimeZone", + arg_types=[ast.DateTimeType()], + return_type=ast.DateTimeType(), ), ) @@ -128,7 +137,10 @@ def visit_field(self, node: ast.Field): return node def _convert_string_property_to_type( - self, node: ast.Field, property_type: Literal["event", "person"], property_name: str + self, + node: ast.Field, + property_type: Literal["event", "person"], + property_name: str, ): posthog_field_type = ( self.person_properties.get(property_name) @@ -146,7 +158,12 @@ def _convert_string_property_to_type( return parse_expr("{node} = 'true'", {"node": node}) return node - def _add_property_notice(self, node: ast.Field, property_type: Literal["event", "person"], field_type: str) -> str: + def _add_property_notice( + self, + node: ast.Field, + property_type: Literal["event", "person"], + field_type: str, + ) -> str: property_name = node.chain[-1] if property_type == "person": if self.context.modifiers.personsOnEventsMode != PersonOnEventsMode.DISABLED: diff 
--git a/posthog/hogql/transforms/test/test_in_cohort.py b/posthog/hogql/transforms/test/test_in_cohort.py index dbef0b685aadf..26e2e18b66af7 100644 --- a/posthog/hogql/transforms/test/test_in_cohort.py +++ b/posthog/hogql/transforms/test/test_in_cohort.py @@ -8,7 +8,12 @@ from posthog.models.cohort.util import recalculate_cohortpeople from posthog.models.utils import UUIDT from posthog.schema import HogQLQueryModifiers -from posthog.test.base import BaseTest, _create_person, _create_event, flush_persons_and_events +from posthog.test.base import ( + BaseTest, + _create_person, + _create_event, + flush_persons_and_events, +) elements_chain_match = lambda x: parse_expr("match(elements_chain, {regex})", {"regex": ast.Constant(value=str(x))}) not_call = lambda x: ast.Call(name="not", args=[x]) @@ -33,7 +38,8 @@ def _create_random_events(self) -> str: def test_in_cohort_dynamic(self): random_uuid = self._create_random_events() cohort = Cohort.objects.create( - team=self.team, groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}] + team=self.team, + groups=[{"properties": [{"key": "$os", "value": "Chrome", "type": "person"}]}], ) recalculate_cohortpeople(cohort, pending_version=0) response = execute_hogql_query( @@ -100,5 +106,8 @@ def test_in_cohort_error(self): self.assertEqual(str(e.exception), "cohort() takes exactly one string or integer argument") with self.assertRaises(HogQLException) as e: - execute_hogql_query(f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", self.team) + execute_hogql_query( + f"SELECT event FROM events WHERE person_id IN COHORT 'blabla'", + self.team, + ) self.assertEqual(str(e.exception), "Could not find a cohort with the name 'blabla'") diff --git a/posthog/hogql/transforms/test/test_lazy_tables.py b/posthog/hogql/transforms/test/test_lazy_tables.py index aad1dbae3fb1c..131fcb227fbbc 100644 --- a/posthog/hogql/transforms/test/test_lazy_tables.py +++ b/posthog/hogql/transforms/test/test_lazy_tables.py @@ 
-80,5 +80,9 @@ def test_select_count_from_lazy_table(self): def _print_select(self, select: str): expr = parse_select(select) - query = print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse") + query = print_ast( + expr, + HogQLContext(team_id=self.team.pk, enable_select_queries=True), + "clickhouse", + ) return pretty_print_in_tests(query, self.team.pk) diff --git a/posthog/hogql/transforms/test/test_property_types.py b/posthog/hogql/transforms/test/test_property_types.py index c50f19a0a792d..10d8bf27cc97b 100644 --- a/posthog/hogql/transforms/test/test_property_types.py +++ b/posthog/hogql/transforms/test/test_property_types.py @@ -30,10 +30,16 @@ def setUp(self): defaults={"property_type": "Numeric"}, ) PropertyDefinition.objects.get_or_create( - team=self.team, type=PropertyDefinition.Type.EVENT, name="bool", defaults={"property_type": "Boolean"} + team=self.team, + type=PropertyDefinition.Type.EVENT, + name="bool", + defaults={"property_type": "Boolean"}, ) PropertyDefinition.objects.get_or_create( - team=self.team, type=PropertyDefinition.Type.PERSON, name="tickets", defaults={"property_type": "Numeric"} + team=self.team, + type=PropertyDefinition.Type.PERSON, + name="tickets", + defaults={"property_type": "Numeric"}, ) PropertyDefinition.objects.get_or_create( team=self.team, @@ -89,5 +95,9 @@ def test_resolve_property_types_event_person_poe_on(self): def _print_select(self, select: str): expr = parse_select(select) - query = print_ast(expr, HogQLContext(team_id=self.team.pk, enable_select_queries=True), "clickhouse") + query = print_ast( + expr, + HogQLContext(team_id=self.team.pk, enable_select_queries=True), + "clickhouse", + ) return pretty_print_in_tests(query, self.team.pk) diff --git a/posthog/hogql/visitor.py b/posthog/hogql/visitor.py index c8e1a5a57a789..db6b1ef6fb72e 100644 --- a/posthog/hogql/visitor.py +++ b/posthog/hogql/visitor.py @@ -128,8 +128,8 @@ def visit_select_query(self, node: ast.SelectQuery): 
self.visit(expr) for expr in node.limit_by or []: self.visit(expr) - self.visit(node.limit), - self.visit(node.offset), + (self.visit(node.limit),) + (self.visit(node.offset),) for expr in (node.window_exprs or {}).values(): self.visit(expr) @@ -248,7 +248,11 @@ def visit_join_constraint(self, node: ast.JoinConstraint): class CloningVisitor(Visitor): """Visitor that traverses and clones the AST tree. Clears types.""" - def __init__(self, clear_types: Optional[bool] = True, clear_locations: Optional[bool] = False): + def __init__( + self, + clear_types: Optional[bool] = True, + clear_locations: Optional[bool] = False, + ): self.clear_types = clear_types self.clear_locations = clear_locations diff --git a/posthog/hogql_queries/events_query_runner.py b/posthog/hogql_queries/events_query_runner.py index ff85691b983d3..d85d251684fa1 100644 --- a/posthog/hogql_queries/events_query_runner.py +++ b/posthog/hogql_queries/events_query_runner.py @@ -99,7 +99,9 @@ def to_query(self) -> ast.SelectQuery: with self.timings.measure("event"): where_exprs.append( parse_expr( - "event = {event}", {"event": ast.Constant(value=self.query.event)}, timings=self.timings + "event = {event}", + {"event": ast.Constant(value=self.query.event)}, + timings=self.timings, ) ) if self.query.actionId: @@ -118,7 +120,9 @@ def to_query(self) -> ast.SelectQuery: ids_list = list(map(str, distinct_ids)) where_exprs.append( parse_expr( - "distinct_id in {list}", {"list": ast.Constant(value=ids_list)}, timings=self.timings + "distinct_id in {list}", + {"list": ast.Constant(value=ids_list)}, + timings=self.timings, ) ) @@ -131,7 +135,9 @@ def to_query(self) -> ast.SelectQuery: parsed_date = relative_date_parse(before, self.team.timezone_info) where_exprs.append( parse_expr( - "timestamp < {timestamp}", {"timestamp": ast.Constant(value=parsed_date)}, timings=self.timings + "timestamp < {timestamp}", + {"timestamp": ast.Constant(value=parsed_date)}, + timings=self.timings, ) ) @@ -261,7 +267,10 @@ def 
select_input_raw(self) -> List[str]: def limit(self) -> int: # importing locally so we could override in a test - from posthog.hogql.constants import DEFAULT_RETURNED_ROWS, MAX_SELECT_RETURNED_ROWS + from posthog.hogql.constants import ( + DEFAULT_RETURNED_ROWS, + MAX_SELECT_RETURNED_ROWS, + ) # adding +1 to the limit to check if there's a "next page" after the requested results return ( diff --git a/posthog/hogql_queries/hogql_query_runner.py b/posthog/hogql_queries/hogql_query_runner.py index 815822ce894c6..576419fdff967 100644 --- a/posthog/hogql_queries/hogql_query_runner.py +++ b/posthog/hogql_queries/hogql_query_runner.py @@ -10,7 +10,13 @@ from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.query_runner import QueryRunner from posthog.models import Team -from posthog.schema import HogQLQuery, HogQLQueryResponse, DashboardFilter, HogQLFilters, DateRange +from posthog.schema import ( + HogQLQuery, + HogQLQueryResponse, + DashboardFilter, + HogQLFilters, + DateRange, +) class HogQLQueryRunner(QueryRunner): diff --git a/posthog/hogql_queries/insights/lifecycle_query_runner.py b/posthog/hogql_queries/insights/lifecycle_query_runner.py index ffa274958ceb0..87a8a345a8462 100644 --- a/posthog/hogql_queries/insights/lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/lifecycle_query_runner.py @@ -3,7 +3,10 @@ from typing import Optional, Any, Dict, List from django.utils.timezone import datetime -from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL +from posthog.caching.insights_api import ( + BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, + REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL, +) from posthog.caching.utils import is_stale from posthog.hogql import ast @@ -16,7 +19,12 @@ from posthog.models import Team, Action from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.filters.mixins.utils import cached_property -from posthog.schema import 
LifecycleQuery, ActionsNode, EventsNode, LifecycleQueryResponse +from posthog.schema import ( + LifecycleQuery, + ActionsNode, + EventsNode, + LifecycleQueryResponse, +) class LifecycleQueryRunner(QueryRunner): @@ -139,7 +147,10 @@ def calculate(self): @cached_property def query_date_range(self): return QueryDateRange( - date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + date_range=self.query.dateRange, + team=self.team, + interval=self.query.interval, + now=datetime.now(), ) @cached_property diff --git a/posthog/hogql_queries/insights/test/test_events_query.py b/posthog/hogql_queries/insights/test/test_events_query.py index 707891d424a41..927829290367f 100644 --- a/posthog/hogql_queries/insights/test/test_events_query.py +++ b/posthog/hogql_queries/insights/test/test_events_query.py @@ -8,7 +8,12 @@ EventPropertyFilter, PropertyOperator, ) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) class TestEventsQueryRunner(ClickhouseTestMixin, APIBaseTest): diff --git a/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py b/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py index 75637d5216ebd..1dba61d970e6c 100644 --- a/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py +++ b/posthog/hogql_queries/insights/test/test_lifecycle_query_runner.py @@ -6,7 +6,13 @@ from posthog.hogql_queries.insights.lifecycle_query_runner import LifecycleQueryRunner from posthog.models.utils import UUIDT from posthog.schema import DateRange, IntervalType, LifecycleQuery, EventsNode -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person, flush_persons_and_events +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, + flush_persons_and_events, +) class 
TestLifecycleQueryRunner(ClickhouseTestMixin, APIBaseTest): @@ -26,7 +32,11 @@ def _create_random_events(self) -> str: distinct_id="bla", event="random event", team=self.team, - properties={"random_prop": "don't include", "random_uuid": random_uuid, "index": index}, + properties={ + "random_prop": "don't include", + "random_uuid": random_uuid, + "index": index, + }, ) flush_persons_and_events() return random_uuid @@ -39,7 +49,10 @@ def _create_events(self, data, event="$pageview"): _create_person( team_id=self.team.pk, distinct_ids=[id], - properties={"name": id, **({"email": "test@posthog.com"} if id == "p1" else {})}, + properties={ + "name": id, + **({"email": "test@posthog.com"} if id == "p1" else {}), + }, ) ) for timestamp in timestamps: @@ -69,7 +82,9 @@ def _create_test_events(self): def _create_query_runner(self, date_from, date_to, interval) -> LifecycleQueryRunner: series = [EventsNode(event="$pageview")] query = LifecycleQuery( - dateRange=DateRange(date_from=date_from, date_to=date_to), interval=interval, series=series + dateRange=DateRange(date_from=date_from, date_to=date_to), + interval=interval, + series=series, ) return LifecycleQueryRunner(team=self.team, query=query) diff --git a/posthog/hogql_queries/insights/trends/aggregation_operations.py b/posthog/hogql_queries/insights/trends/aggregation_operations.py index f585fc313dc70..3920344cbfd52 100644 --- a/posthog/hogql_queries/insights/trends/aggregation_operations.py +++ b/posthog/hogql_queries/insights/trends/aggregation_operations.py @@ -121,7 +121,10 @@ def _events_query(self, events_where_clause: ast.Expr, sample_value: ast.RatioEx timestamp, actor_id """, - placeholders={"events_where_clause": events_where_clause, "sample": sample_value}, + placeholders={ + "events_where_clause": events_where_clause, + "sample": sample_value, + }, ) def get_query_orchestrator(self, events_where_clause: ast.Expr, sample_value: str): diff --git a/posthog/hogql_queries/insights/trends/breakdown.py 
b/posthog/hogql_queries/insights/trends/breakdown.py index 403c5be4da536..a713cb09dcee1 100644 --- a/posthog/hogql_queries/insights/trends/breakdown.py +++ b/posthog/hogql_queries/insights/trends/breakdown.py @@ -4,7 +4,10 @@ from posthog.hogql.timings import HogQLTimings from posthog.hogql_queries.insights.trends.breakdown_session import BreakdownSession from posthog.hogql_queries.insights.trends.breakdown_values import BreakdownValues -from posthog.hogql_queries.insights.trends.utils import get_properties_chain, series_event_name +from posthog.hogql_queries.insights.trends.utils import ( + get_properties_chain, + series_event_name, +) from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.filters.mixins.utils import cached_property from posthog.models.team.team import Team @@ -143,7 +146,6 @@ def _get_breakdown_histogram_multi_if(self) -> ast.Expr: buckets = self._get_breakdown_histogram_buckets() for lower_bound, upper_bound in buckets: - multi_if_exprs.extend( [ ast.And( diff --git a/posthog/hogql_queries/insights/trends/breakdown_values.py b/posthog/hogql_queries/insights/trends/breakdown_values.py index 72ae54d0286be..37d9f7168e121 100644 --- a/posthog/hogql_queries/insights/trends/breakdown_values.py +++ b/posthog/hogql_queries/insights/trends/breakdown_values.py @@ -122,7 +122,12 @@ def _where_filter(self) -> ast.Expr: ) if self.event_name is not None: - filters.append(parse_expr("event = {event}", placeholders={"event": ast.Constant(value=self.event_name)})) + filters.append( + parse_expr( + "event = {event}", + placeholders={"event": ast.Constant(value=self.event_name)}, + ) + ) return ast.And(exprs=filters) diff --git a/posthog/hogql_queries/insights/trends/query_builder.py b/posthog/hogql_queries/insights/trends/query_builder.py index 3c0cd7d9356c7..0a90cae985dba 100644 --- a/posthog/hogql_queries/insights/trends/query_builder.py +++ b/posthog/hogql_queries/insights/trends/query_builder.py @@ -3,7 +3,9 @@ from 
posthog.hogql.parser import parse_expr, parse_select from posthog.hogql.property import property_to_expr from posthog.hogql.timings import HogQLTimings -from posthog.hogql_queries.insights.trends.aggregation_operations import AggregationOperations +from posthog.hogql_queries.insights.trends.aggregation_operations import ( + AggregationOperations, +) from posthog.hogql_queries.insights.trends.breakdown import Breakdown from posthog.hogql_queries.insights.trends.breakdown_session import BreakdownSession from posthog.hogql_queries.insights.trends.utils import series_event_name @@ -157,7 +159,8 @@ def _get_events_subquery(self) -> ast.SelectQuery: # Just complex series aggregation elif self._aggregation_operation.requires_query_orchestration(): return self._aggregation_operation.get_query_orchestrator( - events_where_clause=self._events_filter(), sample_value=self._sample_value() + events_where_clause=self._events_filter(), + sample_value=self._sample_value(), ).build() return default_query @@ -222,7 +225,8 @@ def _events_filter(self) -> ast.Expr: if series_event_name(self.series) is not None: filters.append( parse_expr( - "event = {event}", placeholders={"event": ast.Constant(value=series_event_name(self.series))} + "event = {event}", + placeholders={"event": ast.Constant(value=series_event_name(self.series))}, ) ) diff --git a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py index 760c55577d7db..88e012672e12d 100644 --- a/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/test/test_trends_query_runner.py @@ -3,8 +3,19 @@ from freezegun import freeze_time from posthog.hogql_queries.insights.trends.trends_query_runner import TrendsQueryRunner -from posthog.schema import DateRange, EventsNode, IntervalType, TrendsFilter, TrendsQuery -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, 
_create_person +from posthog.schema import ( + DateRange, + EventsNode, + IntervalType, + TrendsFilter, + TrendsQuery, +) +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) @dataclass @@ -41,7 +52,12 @@ def _create_events(self, data: List[SeriesTestData]): ) for event in person.events: for timestamp in event.timestamps: - _create_event(team=self.team, event=event.event, distinct_id=id, timestamp=timestamp) + _create_event( + team=self.team, + event=event.event, + distinct_id=id, + timestamp=timestamp, + ) return person_result def _create_test_events(self): @@ -74,7 +90,10 @@ def _create_test_events(self): SeriesTestData( distinct_id="p2", events=[ - Series(event="$pageview", timestamps=["2020-01-09T12:00:00Z", "2020-01-12T12:00:00Z"]), + Series( + event="$pageview", + timestamps=["2020-01-09T12:00:00Z", "2020-01-12T12:00:00Z"], + ), Series( event="$pageleave", timestamps=[ @@ -111,7 +130,12 @@ def _create_query_runner(self, date_from, date_to, interval, series, trends_filt return TrendsQueryRunner(team=self.team, query=query) def _run_trends_query( - self, date_from, date_to, interval, series=None, trends_filters: Optional[TrendsFilter] = None + self, + date_from, + date_to, + interval, + series=None, + trends_filters: Optional[TrendsFilter] = None, ): return self._create_query_runner(date_from, date_to, interval, series, trends_filters).calculate() @@ -221,7 +245,11 @@ def test_trends_query_compare(self): self._create_test_events() response = self._run_trends_query( - "2020-01-15", "2020-01-19", IntervalType.day, [EventsNode(event="$pageview")], TrendsFilter(compare=True) + "2020-01-15", + "2020-01-19", + IntervalType.day, + [EventsNode(event="$pageview")], + TrendsFilter(compare=True), ) self.assertEqual(2, len(response.results)) diff --git a/posthog/hogql_queries/insights/trends/trends_query_runner.py b/posthog/hogql_queries/insights/trends/trends_query_runner.py index 9c1dc4eca64f5..cfbcb60fdf28e 100644 
--- a/posthog/hogql_queries/insights/trends/trends_query_runner.py +++ b/posthog/hogql_queries/insights/trends/trends_query_runner.py @@ -6,7 +6,10 @@ from typing import List, Optional, Any, Dict from django.utils.timezone import datetime -from posthog.caching.insights_api import BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL +from posthog.caching.insights_api import ( + BASE_MINIMUM_INSIGHT_REFRESH_INTERVAL, + REDUCED_MINIMUM_INSIGHT_REFRESH_INTERVAL, +) from posthog.caching.utils import is_stale from posthog.hogql import ast @@ -17,12 +20,20 @@ from posthog.hogql_queries.query_runner import QueryRunner from posthog.hogql_queries.utils.formula_ast import FormulaAST from posthog.hogql_queries.utils.query_date_range import QueryDateRange -from posthog.hogql_queries.utils.query_previous_period_date_range import QueryPreviousPeriodDateRange +from posthog.hogql_queries.utils.query_previous_period_date_range import ( + QueryPreviousPeriodDateRange, +) from posthog.models import Team from posthog.models.cohort.cohort import Cohort from posthog.models.filters.mixins.utils import cached_property from posthog.models.property_definition import PropertyDefinition -from posthog.schema import ActionsNode, EventsNode, HogQLQueryResponse, TrendsQuery, TrendsQueryResponse +from posthog.schema import ( + ActionsNode, + EventsNode, + HogQLQueryResponse, + TrendsQuery, + TrendsQueryResponse, +) class TrendsQueryRunner(QueryRunner): @@ -141,7 +152,10 @@ def build_series_response(self, response: HogQLQueryResponse, series: SeriesWith # Modifications for when comparing to previous period if self.query.trendsFilter is not None and self.query.trendsFilter.compare: labels = [ - "{} {}".format(self.query.interval if self.query.interval is not None else "day", i) + "{} {}".format( + self.query.interval if self.query.interval is not None else "day", + i, + ) for i in range(len(series_object["labels"])) ] @@ -171,13 +185,19 @@ def build_series_response(self, 
response: HogQLQueryResponse, series: SeriesWith @cached_property def query_date_range(self): return QueryDateRange( - date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + date_range=self.query.dateRange, + team=self.team, + interval=self.query.interval, + now=datetime.now(), ) @cached_property def query_previous_date_range(self): return QueryPreviousPeriodDateRange( - date_range=self.query.dateRange, team=self.team, interval=self.query.interval, now=datetime.now() + date_range=self.query.dateRange, + team=self.team, + interval=self.query.interval, + now=datetime.now(), ) def series_event(self, series: EventsNode | ActionsNode) -> str | None: @@ -209,12 +229,16 @@ def setup_series(self) -> List[SeriesWithExtras]: for series in series_with_extras: updated_series.append( SeriesWithExtras( - series=series.series, is_previous_period_series=False, overriden_query=series.overriden_query + series=series.series, + is_previous_period_series=False, + overriden_query=series.overriden_query, ) ) updated_series.append( SeriesWithExtras( - series=series.series, is_previous_period_series=True, overriden_query=series.overriden_query + series=series.series, + is_previous_period_series=True, + overriden_query=series.overriden_query, ) ) series_with_extras = updated_series @@ -265,7 +289,9 @@ def _is_breakdown_field_boolean(self): property_type = PropertyDefinition.Type.EVENT field_type = self._event_property( - self.query.breakdown.breakdown, property_type, self.query.breakdown.breakdown_group_type_index + self.query.breakdown.breakdown, + property_type, + self.query.breakdown.breakdown_group_type_index, ) return field_type == "Boolean" @@ -273,7 +299,12 @@ def _convert_boolean(self, value: any): bool_map = {1: "true", 0: "false", "": ""} return bool_map.get(value) or value - def _event_property(self, field: str, field_type: PropertyDefinition.Type, group_type_index: Optional[int]): + def _event_property( + self, + field: str, + 
field_type: PropertyDefinition.Type, + group_type_index: Optional[int], + ): return PropertyDefinition.objects.get( name=field, team=self.team, diff --git a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py index f8941c3899125..ce490cadfc834 100644 --- a/posthog/hogql_queries/legacy_compatibility/filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/filter_to_query.py @@ -240,11 +240,17 @@ def _properties(filter: Dict): if raw_properties is None or len(raw_properties) == 0: return {} elif isinstance(raw_properties, list): - raw_properties = {"type": "AND", "values": [{"type": "AND", "values": raw_properties}]} + raw_properties = { + "type": "AND", + "values": [{"type": "AND", "values": raw_properties}], + } return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} elif is_old_style_properties(raw_properties): raw_properties = transform_old_style_properties(raw_properties) - raw_properties = {"type": "AND", "values": [{"type": "AND", "values": raw_properties}]} + raw_properties = { + "type": "AND", + "values": [{"type": "AND", "values": raw_properties}], + } return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} else: return {"properties": PropertyGroupFilter(**clean_properties(raw_properties))} diff --git a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py index f07405b248976..9a130faa9774f 100644 --- a/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py +++ b/posthog/hogql_queries/legacy_compatibility/test/test_filter_to_query.py @@ -58,8 +58,20 @@ } insight_2 = { "events": [ - {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"}, - {"id": "upgraded_plan", "name": "upgraded_plan", "type": "events", "order": 4, "custom_name": "Upgraded plan"}, + { + "id": "signed_up", + 
"name": "signed_up", + "type": "events", + "order": 2, + "custom_name": "Signed up", + }, + { + "id": "upgraded_plan", + "name": "upgraded_plan", + "type": "events", + "order": 4, + "custom_name": "Upgraded plan", + }, ], "actions": [{"id": 1, "name": "Interacted with file", "type": "actions", "order": 3}], "display": "FunnelViz", @@ -76,17 +88,45 @@ "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}]} + { + "type": "AND", + "values": [ + { + "key": "email", + "type": "person", + "value": "is_set", + "operator": "is_set", + } + ], + } ], }, - "target_entity": {"id": "signed_up", "name": "signed_up", "type": "events", "order": 0}, + "target_entity": { + "id": "signed_up", + "name": "signed_up", + "type": "events", + "order": 0, + }, "retention_type": "retention_first_time", "total_intervals": 9, - "returning_entity": {"id": 1, "name": "Interacted with file", "type": "actions", "order": 0}, + "returning_entity": { + "id": 1, + "name": "Interacted with file", + "type": "actions", + "order": 0, + }, } insight_4 = { "events": [], - "actions": [{"id": 1, "math": "total", "name": "Interacted with file", "type": "actions", "order": 0}], + "actions": [ + { + "id": 1, + "math": "total", + "name": "Interacted with file", + "type": "actions", + "order": 0, + } + ], "compare": False, "display": "ActionsLineGraph", "insight": "LIFECYCLE", @@ -128,7 +168,15 @@ "filter_test_accounts": True, } insight_6 = { - "events": [{"id": "paid_bill", "math": "sum", "type": "events", "order": 0, "math_property": "amount_usd"}], + "events": [ + { + "id": "paid_bill", + "math": "sum", + "type": "events", + "order": 0, + "math_property": "amount_usd", + } + ], "actions": [], "display": "ActionsLineGraph", "insight": "TRENDS", @@ -170,7 +218,14 @@ "values": [ { "type": "AND", - "values": [{"key": "$current_url", "type": "event", "value": "/files/", "operator": "not_icontains"}], + "values": [ + { + 
"key": "$current_url", + "type": "event", + "value": "/files/", + "operator": "not_icontains", + } + ], } ], }, @@ -184,7 +239,12 @@ "type": "events", "order": 0, "properties": [ - {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/", "operator": "exact"} + { + "key": "$current_url", + "type": "event", + "value": "https://hedgebox.net/", + "operator": "exact", + } ], "custom_name": "Viewed homepage", }, @@ -194,11 +254,22 @@ "type": "events", "order": 1, "properties": [ - {"key": "$current_url", "type": "event", "value": "https://hedgebox.net/signup/", "operator": "regex"} + { + "key": "$current_url", + "type": "event", + "value": "https://hedgebox.net/signup/", + "operator": "regex", + } ], "custom_name": "Viewed signup page", }, - {"id": "signed_up", "name": "signed_up", "type": "events", "order": 2, "custom_name": "Signed up"}, + { + "id": "signed_up", + "name": "signed_up", + "type": "events", + "order": 2, + "custom_name": "Signed up", + }, ], "actions": [], "display": "FunnelViz", @@ -279,9 +350,24 @@ } insight_17 = { "events": [ - {"id": "$pageview", "type": "events", "order": 0, "custom_name": "First page view"}, - {"id": "$pageview", "type": "events", "order": 1, "custom_name": "Second page view"}, - {"id": "$pageview", "type": "events", "order": 2, "custom_name": "Third page view"}, + { + "id": "$pageview", + "type": "events", + "order": 0, + "custom_name": "First page view", + }, + { + "id": "$pageview", + "type": "events", + "order": 1, + "custom_name": "Second page view", + }, + { + "id": "$pageview", + "type": "events", + "order": 2, + "custom_name": "Third page view", + }, ], "layout": "horizontal", "display": "FunnelViz", @@ -303,7 +389,14 @@ "name": "Pageviews", "type": "actions", "order": 0, - "properties": [{"key": "$browser", "type": "event", "value": "Chrome", "operator": None}], + "properties": [ + { + "key": "$browser", + "type": "event", + "value": "Chrome", + "operator": None, + } + ], "math_property": None, } ], @@ 
-392,7 +485,14 @@ "interval": "day", "shown_as": "Volume", "breakdown": False, - "properties": [{"key": "$current_url", "type": "event", "value": "https://example.com/", "operator": "icontains"}], + "properties": [ + { + "key": "$current_url", + "type": "event", + "value": "https://example.com/", + "operator": "icontains", + } + ], "breakdown_type": "undefined", } insight_24 = { @@ -471,7 +571,12 @@ "type": "events", "order": 1, "properties": [ - {"key": "$current_url", "type": "event", "value": "posthog.com/signup$", "operator": "regex"} + { + "key": "$current_url", + "type": "event", + "value": "posthog.com/signup$", + "operator": "regex", + } ], "custom_name": "Views on signup page", }, @@ -491,7 +596,15 @@ "breakdown_group_type_index": 0, } insight_31 = { - "events": [{"id": "$autocapture", "math": "total", "name": "$autocapture", "type": "events", "order": 0}], + "events": [ + { + "id": "$autocapture", + "math": "total", + "name": "$autocapture", + "type": "events", + "order": 0, + } + ], "insight": "STICKINESS", "entity_type": "events", } @@ -592,7 +705,12 @@ def test_base_insights(filter: dict): properties_1 = [{"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"}] properties_2 = [ {"key": "account_id", "type": "event", "value": ["some_id"], "operator": "exact"}, - {"key": "$current_url", "type": "event", "value": "/path", "operator": "not_icontains"}, + { + "key": "$current_url", + "type": "event", + "value": "/path", + "operator": "not_icontains", + }, ] properties_3 = {} properties_4 = {"type": "AND", "values": []} @@ -603,8 +721,18 @@ def test_base_insights(filter: dict): { "type": "AND", "values": [ - {"key": "$current_url", "type": "event", "value": "?", "operator": "not_icontains"}, - {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, + { + "key": "$current_url", + "type": "event", + "value": "?", + "operator": "not_icontains", + }, + { + "key": "$referring_domain", + "type": "event", 
+ "value": "google", + "operator": "icontains", + }, ], } ], @@ -612,10 +740,19 @@ def test_base_insights(filter: dict): properties_7 = { "type": "AND", "values": [ - {"type": "AND", "values": [{"type": "AND", "values": []}, {"type": "AND", "values": []}]}, { "type": "AND", - "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + "values": [{"type": "AND", "values": []}, {"type": "AND", "values": []}], + }, + { + "type": "AND", + "values": [ + { + "key": "dateDiff('minute', timestamp, now()) < 5", + "type": "hogql", + "value": None, + } + ], }, ], } @@ -624,11 +761,23 @@ def test_base_insights(filter: dict): "values": [ { "type": "AND", - "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + "values": [ + { + "key": "dateDiff('minute', timestamp, now()) < 5", + "type": "hogql", + "value": None, + } + ], }, { "type": "AND", - "values": [{"key": "dateDiff('minute', timestamp, now()) < 5", "type": "hogql", "value": None}], + "values": [ + { + "key": "dateDiff('minute', timestamp, now()) < 5", + "type": "hogql", + "value": None, + } + ], }, ], } @@ -638,9 +787,24 @@ def test_base_insights(filter: dict): { "type": "AND", "values": [ - {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}, - {"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "person"}, - {"key": "$feature/hogql-insights", "value": ["true"], "operator": "exact", "type": "event"}, + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + }, + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "person", + }, + { + "key": "$feature/hogql-insights", + "value": ["true"], + "operator": "exact", + "type": "event", + }, { "key": "site_url", "value": ["http://localhost:8000"], @@ -649,8 +813,18 @@ def test_base_insights(filter: dict): "group_type_index": 1, }, {"key": "id", "value": 2, "type": "cohort"}, - {"key": 
"tag_name", "value": ["elem"], "operator": "exact", "type": "element"}, - {"key": "$session_duration", "value": None, "operator": "gt", "type": "session"}, + { + "key": "tag_name", + "value": ["elem"], + "operator": "exact", + "type": "element", + }, + { + "key": "$session_duration", + "value": None, + "operator": "gt", + "type": "session", + }, {"type": "hogql", "key": "properties.name", "value": None}, ], }, @@ -659,7 +833,14 @@ def test_base_insights(filter: dict): } properties_10 = [{"key": "id", "type": "cohort", "value": 71, "operator": None}] properties_11 = [{"key": [498], "type": "cohort", "value": 498, "operator": None}] -properties_12 = [{"key": "userId", "type": "event", "values": ["63ffaeae99ac3c4240976d60"], "operator": "exact"}] +properties_12 = [ + { + "key": "userId", + "type": "event", + "values": ["63ffaeae99ac3c4240976d60"], + "operator": "exact", + } +] properties_13 = {"plan": "premium"} properties_14 = {"$current_url__icontains": "signin"} @@ -783,7 +964,10 @@ def test_series_custom(self): def test_series_order(self): filter = { - "events": [{"id": "$pageview", "order": 1}, {"id": "$pageview", "math": "dau", "order": 2}], + "events": [ + {"id": "$pageview", "order": 1}, + {"id": "$pageview", "math": "dau", "order": 2}, + ], "actions": [{"id": 1, "order": 3}, {"id": 1, "math": "dau", "order": 0}], } @@ -803,9 +987,20 @@ def test_series_math(self): filter = { "events": [ {"id": "$pageview", "math": "dau"}, # base math type - {"id": "$pageview", "math": "median", "math_property": "$math_prop"}, # property math type - {"id": "$pageview", "math": "avg_count_per_actor"}, # count per actor math type - {"id": "$pageview", "math": "unique_group", "math_group_type_index": 0}, # unique group + { + "id": "$pageview", + "math": "median", + "math_property": "$math_prop", + }, # property math type + { + "id": "$pageview", + "math": "avg_count_per_actor", + }, # count per actor math type + { + "id": "$pageview", + "math": "unique_group", + 
"math_group_type_index": 0, + }, # unique group { "id": "$pageview", "math": "hogql", @@ -821,10 +1016,22 @@ def test_series_math(self): [ EventsNode(event="$pageview", name="$pageview", math=BaseMathType.dau), EventsNode( - event="$pageview", name="$pageview", math=PropertyMathType.median, math_property="$math_prop" + event="$pageview", + name="$pageview", + math=PropertyMathType.median, + math_property="$math_prop", + ), + EventsNode( + event="$pageview", + name="$pageview", + math=CountPerActorMathType.avg_count_per_actor, + ), + EventsNode( + event="$pageview", + name="$pageview", + math="unique_group", + math_group_type_index=0, ), - EventsNode(event="$pageview", name="$pageview", math=CountPerActorMathType.avg_count_per_actor), - EventsNode(event="$pageview", name="$pageview", math="unique_group", math_group_type_index=0), EventsNode( event="$pageview", name="$pageview", @@ -840,21 +1047,52 @@ def test_series_properties(self): {"id": "$pageview", "properties": []}, # smoke test { "id": "$pageview", - "properties": [{"key": "success", "type": "event", "value": ["true"], "operator": "exact"}], + "properties": [ + { + "key": "success", + "type": "event", + "value": ["true"], + "operator": "exact", + } + ], }, { "id": "$pageview", - "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + "properties": [ + { + "key": "email", + "type": "person", + "value": "is_set", + "operator": "is_set", + } + ], }, { "id": "$pageview", - "properties": [{"key": "text", "value": ["some text"], "operator": "exact", "type": "element"}], + "properties": [ + { + "key": "text", + "value": ["some text"], + "operator": "exact", + "type": "element", + } + ], + }, + { + "id": "$pageview", + "properties": [ + { + "key": "$session_duration", + "value": 1, + "operator": "gt", + "type": "session", + } + ], }, { "id": "$pageview", - "properties": [{"key": "$session_duration", "value": 1, "operator": "gt", "type": "session"}], + "properties": [{"key": 
"id", "value": 2, "type": "cohort"}], }, - {"id": "$pageview", "properties": [{"key": "id", "value": 2, "type": "cohort"}]}, { "id": "$pageview", "properties": [ @@ -870,14 +1108,28 @@ def test_series_properties(self): { "id": "$pageview", "properties": [ - {"key": "dateDiff('minute', timestamp, now()) < 30", "type": "hogql", "value": None} + { + "key": "dateDiff('minute', timestamp, now()) < 30", + "type": "hogql", + "value": None, + } ], }, { "id": "$pageview", "properties": [ - {"key": "$referring_domain", "type": "event", "value": "google", "operator": "icontains"}, - {"key": "utm_source", "type": "event", "value": "is_not_set", "operator": "is_not_set"}, + { + "key": "$referring_domain", + "type": "event", + "value": "google", + "operator": "icontains", + }, + { + "key": "utm_source", + "type": "event", + "value": "is_not_set", + "operator": "is_not_set", + }, ], }, ] @@ -892,18 +1144,34 @@ def test_series_properties(self): EventsNode( event="$pageview", name="$pageview", - properties=[EventPropertyFilter(key="success", value=["true"], operator=PropertyOperator.exact)], + properties=[ + EventPropertyFilter( + key="success", + value=["true"], + operator=PropertyOperator.exact, + ) + ], ), EventsNode( event="$pageview", name="$pageview", - properties=[PersonPropertyFilter(key="email", value="is_set", operator=PropertyOperator.is_set)], + properties=[ + PersonPropertyFilter( + key="email", + value="is_set", + operator=PropertyOperator.is_set, + ) + ], ), EventsNode( event="$pageview", name="$pageview", properties=[ - ElementPropertyFilter(key=Key.text, value=["some text"], operator=PropertyOperator.exact) + ElementPropertyFilter( + key=Key.text, + value=["some text"], + operator=PropertyOperator.exact, + ) ], ), EventsNode( @@ -911,13 +1179,20 @@ def test_series_properties(self): name="$pageview", properties=[SessionPropertyFilter(value=1, operator=PropertyOperator.gt)], ), - EventsNode(event="$pageview", name="$pageview", 
properties=[CohortPropertyFilter(value=2)]), + EventsNode( + event="$pageview", + name="$pageview", + properties=[CohortPropertyFilter(value=2)], + ), EventsNode( event="$pageview", name="$pageview", properties=[ GroupPropertyFilter( - key="name", value=["Hedgebox Inc."], operator=PropertyOperator.exact, group_type_index=2 + key="name", + value=["Hedgebox Inc."], + operator=PropertyOperator.exact, + group_type_index=2, ) ], ), @@ -931,9 +1206,15 @@ def test_series_properties(self): name="$pageview", properties=[ EventPropertyFilter( - key="$referring_domain", value="google", operator=PropertyOperator.icontains + key="$referring_domain", + value="google", + operator=PropertyOperator.icontains, + ), + EventPropertyFilter( + key="utm_source", + value="is_not_set", + operator=PropertyOperator.is_not_set, ), - EventPropertyFilter(key="utm_source", value="is_not_set", operator=PropertyOperator.is_not_set), ], ), ], @@ -1070,7 +1351,11 @@ def test_retention_filter(self): "retention_type": "retention_first_time", # retention_reference="previous", "total_intervals": 12, - "returning_entity": {"id": "$pageview", "name": "$pageview", "type": "events"}, + "returning_entity": { + "id": "$pageview", + "name": "$pageview", + "type": "events", + }, "target_entity": {"id": "$pageview", "name": "$pageview", "type": "events"}, "period": "Week", } @@ -1119,7 +1404,13 @@ def test_paths_filter(self): "funnel_filter": { "insight": "FUNNELS", "events": [ - {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"}, + { + "type": "events", + "id": "$pageview", + "order": 0, + "name": "$pageview", + "math": "total", + }, {"type": "events", "id": None, "order": 1, "math": "total"}, ], "funnel_viz_type": "steps", @@ -1152,7 +1443,13 @@ def test_paths_filter(self): funnel_filter={ "insight": "FUNNELS", "events": [ - {"type": "events", "id": "$pageview", "order": 0, "name": "$pageview", "math": "total"}, + { + "type": "events", + "id": "$pageview", + "order": 0, + 
"name": "$pageview", + "math": "total", + }, {"type": "events", "id": None, "order": 1, "math": "total"}, ], "funnel_viz_type": "steps", diff --git a/posthog/hogql_queries/persons_query_runner.py b/posthog/hogql_queries/persons_query_runner.py index a373a1acbf7d9..d597f4bab1c2a 100644 --- a/posthog/hogql_queries/persons_query_runner.py +++ b/posthog/hogql_queries/persons_query_runner.py @@ -69,7 +69,11 @@ def filter_conditions(self) -> List[ast.Expr]: source_query_runner = get_query_runner(source, self.team, self.timings) source_query = source_query_runner.to_persons_query() where_exprs.append( - ast.CompareOperation(left=ast.Field(chain=["id"]), op=ast.CompareOperationOp.In, right=source_query) + ast.CompareOperation( + left=ast.Field(chain=["id"]), + op=ast.CompareOperationOp.In, + right=source_query, + ) ) except NotImplementedError: raise ValueError(f"Queries of type '{source.kind}' are not implemented as a PersonsQuery sources.") @@ -113,7 +117,10 @@ def input_columns(self) -> List[str]: return self.query.select or ["person", "id", "created_at", "person.$delete"] def query_limit(self) -> int: - return min(MAX_SELECT_RETURNED_ROWS, DEFAULT_RETURNED_ROWS if self.query.limit is None else self.query.limit) + return min( + MAX_SELECT_RETURNED_ROWS, + DEFAULT_RETURNED_ROWS if self.query.limit is None else self.query.limit, + ) def to_query(self) -> ast.SelectQuery: with self.timings.measure("columns"): @@ -175,7 +182,8 @@ def to_query(self) -> ast.SelectQuery: ast.OrderExpr( expr=ast.Field(chain=["properties", order_property]), order=cast( - Literal["ASC", "DESC"], "DESC" if self.query.orderBy[0] == "person DESC" else "ASC" + Literal["ASC", "DESC"], + "DESC" if self.query.orderBy[0] == "person DESC" else "ASC", ), ) ] diff --git a/posthog/hogql_queries/test/test_hogql_query_runner.py b/posthog/hogql_queries/test/test_hogql_query_runner.py index 6af80f638e3ba..badc27efef3bf 100644 --- a/posthog/hogql_queries/test/test_hogql_query_runner.py +++ 
b/posthog/hogql_queries/test/test_hogql_query_runner.py @@ -3,7 +3,13 @@ from posthog.hogql_queries.hogql_query_runner import HogQLQueryRunner from posthog.models.utils import UUIDT from posthog.schema import HogQLPropertyFilter, HogQLQuery, HogQLFilters -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, flush_persons_and_events, _create_event +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_person, + flush_persons_and_events, + _create_event, +) class TestHogQLQueryRunner(ClickhouseTestMixin, APIBaseTest): @@ -24,7 +30,11 @@ def _create_random_persons(self) -> str: distinct_ids=[f"id-{random_uuid}-{index}"], is_identified=True, ) - _create_event(distinct_id=f"id-{random_uuid}-{index}", event=f"clicky-{index}", team=self.team) + _create_event( + distinct_id=f"id-{random_uuid}-{index}", + event=f"clicky-{index}", + team=self.team, + ) flush_persons_and_events() return random_uuid @@ -60,7 +70,9 @@ def test_hogql_query_filters(self): select=[ast.Call(name="count", args=[ast.Field(chain=["event"])])], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), where=ast.CompareOperation( - left=ast.Field(chain=["event"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value="clicky-3") + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="clicky-3"), ), ) self.assertEqual(clear_locations(query), expected) @@ -69,7 +81,10 @@ def test_hogql_query_filters(self): def test_hogql_query_values(self): runner = self._create_runner( - HogQLQuery(query="select count(event) from events where event={e}", values={"e": "clicky-3"}) + HogQLQuery( + query="select count(event) from events where event={e}", + values={"e": "clicky-3"}, + ) ) query = runner.to_query() query = clear_locations(query) @@ -77,7 +92,9 @@ def test_hogql_query_values(self): select=[ast.Call(name="count", args=[ast.Field(chain=["event"])])], select_from=ast.JoinExpr(table=ast.Field(chain=["events"])), 
where=ast.CompareOperation( - left=ast.Field(chain=["event"]), op=ast.CompareOperationOp.Eq, right=ast.Constant(value="clicky-3") + left=ast.Field(chain=["event"]), + op=ast.CompareOperationOp.Eq, + right=ast.Constant(value="clicky-3"), ), ) self.assertEqual(clear_locations(query), expected) diff --git a/posthog/hogql_queries/test/test_persons_query_runner.py b/posthog/hogql_queries/test/test_persons_query_runner.py index fbe65319a5912..7460d8cd728b7 100644 --- a/posthog/hogql_queries/test/test_persons_query_runner.py +++ b/posthog/hogql_queries/test/test_persons_query_runner.py @@ -13,7 +13,13 @@ EventsNode, IntervalType, ) -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, flush_persons_and_events, _create_event +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_person, + flush_persons_and_events, + _create_event, +) from freezegun import freeze_time @@ -35,7 +41,11 @@ def _create_random_persons(self) -> str: distinct_ids=[f"id-{random_uuid}-{index}"], is_identified=True, ) - _create_event(distinct_id=f"id-{random_uuid}-{index}", event=f"clicky-{index}", team=self.team) + _create_event( + distinct_id=f"id-{random_uuid}-{index}", + event=f"clicky-{index}", + team=self.team, + ) flush_persons_and_events() return random_uuid @@ -81,7 +91,11 @@ def test_persons_query_properties(self): runner = self._create_runner( PersonsQuery( properties=[ - PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact), + PersonPropertyFilter( + key="random_uuid", + value=self.random_uuid, + operator=PropertyOperator.exact, + ), HogQLPropertyFilter(key="toInt(properties.index) > 5"), ] ) @@ -93,7 +107,11 @@ def test_persons_query_fixed_properties(self): runner = self._create_runner( PersonsQuery( fixedProperties=[ - PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact), + PersonPropertyFilter( + key="random_uuid", + value=self.random_uuid, + 
operator=PropertyOperator.exact, + ), HogQLPropertyFilter(key="toInt(properties.index) < 2"), ] ) @@ -144,7 +162,12 @@ def test_persons_query_limit(self): self.assertEqual(response.hasMore, True) runner = self._create_runner( - PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], limit=1, offset=2) + PersonsQuery( + select=["properties.email"], + orderBy=["properties.email DESC"], + limit=1, + offset=2, + ) ) response = runner.calculate() self.assertEqual(response.results, [[f"jacob7@{self.random_uuid}.posthog.com"]]) @@ -153,7 +176,11 @@ def test_persons_query_limit(self): def test_source_hogql_query(self): self.random_uuid = self._create_random_persons() source_query = HogQLQuery(query="SELECT distinct person_id FROM events WHERE event='clicky-4'") - query = PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], source=source_query) + query = PersonsQuery( + select=["properties.email"], + orderBy=["properties.email DESC"], + source=source_query, + ) runner = self._create_runner(query) response = runner.calculate() self.assertEqual(response.results, [[f"jacob4@{self.random_uuid}.posthog.com"]]) @@ -165,12 +192,20 @@ def test_source_lifecycle_query(self): source_query = LifecycleQuery( series=[EventsNode(event="clicky-4")], properties=[ - PersonPropertyFilter(key="random_uuid", value=self.random_uuid, operator=PropertyOperator.exact) + PersonPropertyFilter( + key="random_uuid", + value=self.random_uuid, + operator=PropertyOperator.exact, + ) ], interval=IntervalType.day, dateRange=DateRange(date_from="-7d"), ) - query = PersonsQuery(select=["properties.email"], orderBy=["properties.email DESC"], source=source_query) + query = PersonsQuery( + select=["properties.email"], + orderBy=["properties.email DESC"], + source=source_query, + ) runner = self._create_runner(query) response = runner.calculate() self.assertEqual(response.results, [[f"jacob4@{self.random_uuid}.posthog.com"]]) diff --git 
a/posthog/hogql_queries/test/test_query_runner.py b/posthog/hogql_queries/test/test_query_runner.py index 9ac9cb5956df2..5b82b0fae5af9 100644 --- a/posthog/hogql_queries/test/test_query_runner.py +++ b/posthog/hogql_queries/test/test_query_runner.py @@ -6,7 +6,11 @@ from freezegun import freeze_time from pydantic import BaseModel -from posthog.hogql_queries.query_runner import QueryResponse, QueryRunner, RunnableQueryNode +from posthog.hogql_queries.query_runner import ( + QueryResponse, + QueryRunner, + RunnableQueryNode, +) from posthog.models.team.team import Team from posthog.test.base import BaseTest diff --git a/posthog/hogql_queries/utils/query_date_range.py b/posthog/hogql_queries/utils/query_date_range.py index 9c2a99e62d61f..be4e993326486 100644 --- a/posthog/hogql_queries/utils/query_date_range.py +++ b/posthog/hogql_queries/utils/query_date_range.py @@ -11,7 +11,11 @@ from posthog.models.team import Team from posthog.queries.util import get_earliest_timestamp from posthog.schema import DateRange, IntervalType -from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping +from posthog.utils import ( + DEFAULT_DATE_FROM_DAYS, + relative_date_parse, + relative_date_parse_with_delta_mapping, +) # Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries @@ -24,7 +28,11 @@ class QueryDateRange: _now_without_timezone: datetime def __init__( - self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime + self, + date_range: Optional[DateRange], + team: Team, + interval: Optional[IntervalType], + now: datetime, ) -> None: self._team = team self._date_range = date_range @@ -40,7 +48,10 @@ def date_to(self) -> datetime: if self._date_range and self._date_range.date_to: date_to, delta_mapping = relative_date_parse_with_delta_mapping( - self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone + 
self._date_range.date_to, + self._team.timezone_info, + always_truncate=True, + now=self.now_with_timezone, ) is_relative = not self._date_range or not self._date_range.date_to or delta_mapping is not None @@ -60,7 +71,9 @@ def date_from(self) -> datetime: date_from = self.get_earliest_timestamp() elif self._date_range and isinstance(self._date_range.date_from, str): date_from = relative_date_parse( - self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone + self._date_range.date_from, + self._team.timezone_info, + now=self.now_with_timezone, ) else: date_from = self.now_with_timezone.replace(hour=0, minute=0, second=0, microsecond=0) - relativedelta( @@ -106,25 +119,38 @@ def interval_name(self) -> str: def date_to_as_hogql(self) -> ast.Expr: return ast.Call( - name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])] + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_to_str))])], ) def date_from_as_hogql(self) -> ast.Expr: return ast.Call( - name="assumeNotNull", args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])] + name="assumeNotNull", + args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.date_from_str))])], ) def previous_period_date_from_as_hogql(self) -> ast.Expr: return ast.Call( name="assumeNotNull", - args=[ast.Call(name="toDateTime", args=[(ast.Constant(value=self.previous_period_date_from_str))])], + args=[ + ast.Call( + name="toDateTime", + args=[(ast.Constant(value=self.previous_period_date_from_str))], + ) + ], ) def one_interval_period(self) -> ast.Expr: - return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", args=[ast.Constant(value=1)]) + return ast.Call( + name=f"toInterval{self.interval_name.capitalize()}", + args=[ast.Constant(value=1)], + ) def number_interval_periods(self) -> ast.Expr: - return ast.Call(name=f"toInterval{self.interval_name.capitalize()}", 
args=[ast.Field(chain=["number"])]) + return ast.Call( + name=f"toInterval{self.interval_name.capitalize()}", + args=[ast.Field(chain=["number"])], + ) def interval_period_string_as_hogql_constant(self) -> ast.Expr: return ast.Constant(value=self.interval_name) @@ -143,7 +169,13 @@ def to_properties(self, field: Optional[List[str]] = None) -> List[ast.Expr]: field = ["timestamp"] return [ ast.CompareOperation( - left=ast.Field(chain=field), op=CompareOperationOp.LtEq, right=self.date_to_as_hogql() + left=ast.Field(chain=field), + op=CompareOperationOp.LtEq, + right=self.date_to_as_hogql(), + ), + ast.CompareOperation( + left=ast.Field(chain=field), + op=CompareOperationOp.Gt, + right=self.date_to_as_hogql(), ), - ast.CompareOperation(left=ast.Field(chain=field), op=CompareOperationOp.Gt, right=self.date_to_as_hogql()), ] diff --git a/posthog/hogql_queries/utils/query_previous_period_date_range.py b/posthog/hogql_queries/utils/query_previous_period_date_range.py index ac16f0b9eec10..c127ac3e36d07 100644 --- a/posthog/hogql_queries/utils/query_previous_period_date_range.py +++ b/posthog/hogql_queries/utils/query_previous_period_date_range.py @@ -4,7 +4,10 @@ from posthog.hogql_queries.utils.query_date_range import QueryDateRange from posthog.models.team import Team from posthog.schema import DateRange, IntervalType -from posthog.utils import get_compare_period_dates, relative_date_parse_with_delta_mapping +from posthog.utils import ( + get_compare_period_dates, + relative_date_parse_with_delta_mapping, +) # Originally similar to posthog/queries/query_date_range.py but rewritten to be used in HogQL queries @@ -17,14 +20,20 @@ class QueryPreviousPeriodDateRange(QueryDateRange): _now_without_timezone: datetime def __init__( - self, date_range: Optional[DateRange], team: Team, interval: Optional[IntervalType], now: datetime + self, + date_range: Optional[DateRange], + team: Team, + interval: Optional[IntervalType], + now: datetime, ) -> None: super().__init__(date_range, 
team, interval, now) def date_from_delta_mappings(self) -> Dict[str, int] | None: if self._date_range and isinstance(self._date_range.date_from, str) and self._date_range.date_from != "all": delta_mapping = relative_date_parse_with_delta_mapping( - self._date_range.date_from, self._team.timezone_info, now=self.now_with_timezone + self._date_range.date_from, + self._team.timezone_info, + now=self.now_with_timezone, )[1] return delta_mapping @@ -33,7 +42,10 @@ def date_from_delta_mappings(self) -> Dict[str, int] | None: def date_to_delta_mappings(self) -> Dict[str, int] | None: if self._date_range and self._date_range.date_to: delta_mapping = relative_date_parse_with_delta_mapping( - self._date_range.date_to, self._team.timezone_info, always_truncate=True, now=self.now_with_timezone + self._date_range.date_to, + self._team.timezone_info, + always_truncate=True, + now=self.now_with_timezone, )[1] return delta_mapping return None diff --git a/posthog/hogql_queries/web_analytics/stats_table.py b/posthog/hogql_queries/web_analytics/stats_table.py index 12e739413f2d1..dbd92defd2814 100644 --- a/posthog/hogql_queries/web_analytics/stats_table.py +++ b/posthog/hogql_queries/web_analytics/stats_table.py @@ -5,7 +5,9 @@ COUNTS_CTE, BOUNCE_RATE_CTE, ) -from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( + WebAnalyticsQueryRunner, +) from posthog.schema import ( WebStatsTableQuery, WebStatsBreakdown, diff --git a/posthog/hogql_queries/web_analytics/top_clicks.py b/posthog/hogql_queries/web_analytics/top_clicks.py index 004cad7947c93..1693f2c1d86ce 100644 --- a/posthog/hogql_queries/web_analytics/top_clicks.py +++ b/posthog/hogql_queries/web_analytics/top_clicks.py @@ -4,7 +4,9 @@ from posthog.hogql.parser import parse_select from posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.utils.query_date_range import QueryDateRange -from 
posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( + WebAnalyticsQueryRunner, +) from posthog.models.filters.mixins.utils import cached_property from posthog.schema import WebTopClicksQuery, WebTopClicksQueryResponse @@ -51,9 +53,17 @@ def calculate(self): ) return WebTopClicksQueryResponse( - columns=response.columns, results=response.results, timings=response.timings, types=response.types + columns=response.columns, + results=response.results, + timings=response.timings, + types=response.types, ) @cached_property def query_date_range(self): - return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now()) + return QueryDateRange( + date_range=self.query.dateRange, + team=self.team, + interval=None, + now=datetime.now(), + ) diff --git a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py index a9d0092565f59..16f31272d43a4 100644 --- a/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py +++ b/posthog/hogql_queries/web_analytics/web_analytics_query_runner.py @@ -35,7 +35,12 @@ def _refresh_frequency(self): @cached_property def query_date_range(self): - return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now()) + return QueryDateRange( + date_range=self.query.dateRange, + team=self.team, + interval=None, + now=datetime.now(), + ) @cached_property def pathname_property_filter(self) -> Optional[EventPropertyFilter]: diff --git a/posthog/hogql_queries/web_analytics/web_overview.py b/posthog/hogql_queries/web_analytics/web_overview.py index 062bf72d2968e..19a587245443d 100644 --- a/posthog/hogql_queries/web_analytics/web_overview.py +++ b/posthog/hogql_queries/web_analytics/web_overview.py @@ -7,7 +7,9 @@ from posthog.hogql.property import property_to_expr from 
posthog.hogql.query import execute_hogql_query from posthog.hogql_queries.utils.query_date_range import QueryDateRange -from posthog.hogql_queries.web_analytics.web_analytics_query_runner import WebAnalyticsQueryRunner +from posthog.hogql_queries.web_analytics.web_analytics_query_runner import ( + WebAnalyticsQueryRunner, +) from posthog.models.filters.mixins.utils import cached_property from posthog.schema import WebOverviewQueryResponse, WebOverviewQuery @@ -118,14 +120,23 @@ def calculate(self): @cached_property def query_date_range(self): - return QueryDateRange(date_range=self.query.dateRange, team=self.team, interval=None, now=datetime.now()) + return QueryDateRange( + date_range=self.query.dateRange, + team=self.team, + interval=None, + now=datetime.now(), + ) def event_properties(self) -> ast.Expr: return property_to_expr(self.query.properties, team=self.team) def to_data( - key: str, kind: str, value: Optional[float], previous: Optional[float], is_increase_bad: Optional[bool] = None + key: str, + kind: str, + value: Optional[float], + previous: Optional[float], + is_increase_bad: Optional[bool] = None, ) -> dict: if kind == "percentage": if value is not None: diff --git a/posthog/jwt.py b/posthog/jwt.py index 73d42c80c3ee1..fa458ab2f5e3f 100644 --- a/posthog/jwt.py +++ b/posthog/jwt.py @@ -20,7 +20,11 @@ def encode_jwt(payload: dict, expiry_delta: timedelta, audience: PosthogJwtAudie raise Exception("Audience must be in the list of PostHog-supported audiences") encoded_jwt = jwt.encode( - {**payload, "exp": datetime.now(tz=timezone.utc) + expiry_delta, "aud": audience.value}, + { + **payload, + "exp": datetime.now(tz=timezone.utc) + expiry_delta, + "aud": audience.value, + }, settings.SECRET_KEY, algorithm="HS256", ) diff --git a/posthog/kafka_client/client.py b/posthog/kafka_client/client.py index 2de052c8e73f8..a22de73a8fffe 100644 --- a/posthog/kafka_client/client.py +++ b/posthog/kafka_client/client.py @@ -6,7 +6,11 @@ from django.conf import settings 
from kafka import KafkaConsumer as KC from kafka import KafkaProducer as KP -from kafka.producer.future import FutureProduceResult, FutureRecordMetadata, RecordMetadata +from kafka.producer.future import ( + FutureProduceResult, + FutureRecordMetadata, + RecordMetadata, +) from kafka.structs import TopicPartition from statshog.defaults.django import statsd from structlog import get_logger @@ -24,7 +28,13 @@ class KafkaProducerForTests: def __init__(self): pass - def send(self, topic: str, value: Any, key: Any = None, headers: Optional[List[Tuple[str, bytes]]] = None): + def send( + self, + topic: str, + value: Any, + key: Any = None, + headers: Optional[List[Tuple[str, bytes]]] = None, + ): produce_future = FutureProduceResult(topic_partition=TopicPartition(topic, 1)) future = FutureRecordMetadata( produce_future=produce_future, @@ -81,7 +91,10 @@ class _KafkaSecurityProtocol(str, Enum): def _sasl_params(): - if settings.KAFKA_SECURITY_PROTOCOL in [_KafkaSecurityProtocol.SASL_PLAINTEXT, _KafkaSecurityProtocol.SASL_SSL]: + if settings.KAFKA_SECURITY_PROTOCOL in [ + _KafkaSecurityProtocol.SASL_PLAINTEXT, + _KafkaSecurityProtocol.SASL_SSL, + ]: return { "sasl_mechanism": settings.KAFKA_SASL_MECHANISM, "sasl_plain_username": settings.KAFKA_SASL_USER, @@ -135,7 +148,10 @@ def on_send_success(self, record_metadata: RecordMetadata): statsd.incr("posthog_cloud_kafka_send_success", tags={"topic": record_metadata.topic}) def on_send_failure(self, topic: str, exc: Exception): - statsd.incr("posthog_cloud_kafka_send_failure", tags={"topic": topic, "exception": exc.__class__.__name__}) + statsd.incr( + "posthog_cloud_kafka_send_failure", + tags={"topic": topic, "exception": exc.__class__.__name__}, + ) def produce( self, @@ -208,7 +224,10 @@ def build_kafka_consumer( ): if test: consumer = KafkaConsumerForTests( - topic=topic, auto_offset_reset=auto_offset_reset, max=10, consumer_timeout_ms=consumer_timeout_ms + topic=topic, + auto_offset_reset=auto_offset_reset, + max=10, + 
consumer_timeout_ms=consumer_timeout_ms, ) elif settings.KAFKA_BASE64_KEYS: consumer = helper.get_kafka_consumer( diff --git a/posthog/logging/timing.py b/posthog/logging/timing.py index b736450e0d8d7..d83b692fb2894 100644 --- a/posthog/logging/timing.py +++ b/posthog/logging/timing.py @@ -34,7 +34,8 @@ def wrapper(*args, **kwargs): finally: duration = round((time() - start) * 1000, 1) print( # noqa T201 - f"Timed function: {fn_name} took {duration}ms with args", {"args": args, "kwargs": kwargs} + f"Timed function: {fn_name} took {duration}ms with args", + {"args": args, "kwargs": kwargs}, ) return wrapper diff --git a/posthog/management/commands/backfill_persons_and_groups_on_events.py b/posthog/management/commands/backfill_persons_and_groups_on_events.py index 04880e7fa32dc..b7fb2fcbc46e9 100644 --- a/posthog/management/commands/backfill_persons_and_groups_on_events.py +++ b/posthog/management/commands/backfill_persons_and_groups_on_events.py @@ -138,7 +138,6 @@ def print_and_execute_query(sql: str, name: str, dry_run: bool, timeout=180, que def run_backfill(options): - if not options["team_id"]: logger.error("You must specify --team-id to run this script") exit(1) @@ -149,12 +148,20 @@ def run_backfill(options): print("Dry run. 
Queries to run:", end="\n\n") print_and_execute_query(GROUPS_DICTIONARY_SQL, "GROUPS_DICTIONARY_SQL", dry_run) - print_and_execute_query(PERSON_DISTINCT_IDS_DICTIONARY_SQL, "PERSON_DISTINCT_IDS_DICTIONARY_SQL", dry_run) + print_and_execute_query( + PERSON_DISTINCT_IDS_DICTIONARY_SQL, + "PERSON_DISTINCT_IDS_DICTIONARY_SQL", + dry_run, + ) print_and_execute_query(PERSONS_DICTIONARY_SQL, "PERSONS_DICTIONARY_SQL", dry_run) tag_queries(kind="backfill", id=backfill_query_id) print_and_execute_query( - BACKFILL_SQL, "BACKFILL_SQL", dry_run, 0, {"team_id": options["team_id"], "id": backfill_query_id} + BACKFILL_SQL, + "BACKFILL_SQL", + dry_run, + 0, + {"team_id": options["team_id"], "id": backfill_query_id}, ) reset_query_tags() @@ -177,11 +184,17 @@ class Command(BaseCommand): help = "Backfill persons and groups data on events for a given team" def add_arguments(self, parser): - - parser.add_argument("--team-id", default=None, type=str, help="Specify a team to backfill data for.") + parser.add_argument( + "--team-id", + default=None, + type=str, + help="Specify a team to backfill data for.", + ) parser.add_argument( - "--live-run", action="store_true", help="Opts out of default 'dry run' mode and actually runs the queries." 
+ "--live-run", + action="store_true", + help="Opts out of default 'dry run' mode and actually runs the queries.", ) def handle(self, *args, **options): diff --git a/posthog/management/commands/create_batch_export_from_app.py b/posthog/management/commands/create_batch_export_from_app.py index 510e0f3dbfa4d..6fa577f582c55 100644 --- a/posthog/management/commands/create_batch_export_from_app.py +++ b/posthog/management/commands/create_batch_export_from_app.py @@ -15,7 +15,9 @@ class Command(BaseCommand): def add_arguments(self, parser): """Add arguments to the parser.""" parser.add_argument( - "--plugin-config-id", type=int, help="The ID of the PluginConfig to use as a base for the new BatchExport" + "--plugin-config-id", + type=int, + help="The ID of the PluginConfig to use as a base for the new BatchExport", ) parser.add_argument( "--team-id", @@ -116,7 +118,11 @@ def handle(self, *args, **options): end_at = dt.datetime.utcnow() start_at = end_at - (dt.timedelta(hours=1) if interval == "hour" else dt.timedelta(days=1)) backfill_export( - client, batch_export_id=str(batch_export.id), team_id=team_id, start_at=start_at, end_at=end_at + client, + batch_export_id=str(batch_export.id), + team_id=team_id, + start_at=start_at, + end_at=end_at, ) self.stdout.write(f"Triggered backfill for BatchExport '{name}'.") diff --git a/posthog/management/commands/create_ch_migration.py b/posthog/management/commands/create_ch_migration.py index 3b5334498589b..3f4495a0825b9 100644 --- a/posthog/management/commands/create_ch_migration.py +++ b/posthog/management/commands/create_ch_migration.py @@ -10,6 +10,7 @@ operations = [] """ + # ex: python manage.py create_ch_migration class Command(BaseCommand): help = "Create blank clickhouse migration" diff --git a/posthog/management/commands/execute_temporal_workflow.py b/posthog/management/commands/execute_temporal_workflow.py index 73d61979ab909..df9f5d993fc07 100644 --- a/posthog/management/commands/execute_temporal_workflow.py +++ 
b/posthog/management/commands/execute_temporal_workflow.py @@ -31,17 +31,45 @@ def add_arguments(self, parser): "Set an ID in order to limit concurrency." ), ) - parser.add_argument("--temporal-host", default=settings.TEMPORAL_HOST, help="Hostname for Temporal Scheduler") - parser.add_argument("--temporal-port", default=settings.TEMPORAL_PORT, help="Port for Temporal Scheduler") - parser.add_argument("--namespace", default=settings.TEMPORAL_NAMESPACE, help="Namespace to connect to") - parser.add_argument("--task-queue", default=settings.TEMPORAL_TASK_QUEUE, help="Task queue to service") parser.add_argument( - "--server-root-ca-cert", default=settings.TEMPORAL_CLIENT_ROOT_CA, help="Optional root server CA cert" + "--temporal-host", + default=settings.TEMPORAL_HOST, + help="Hostname for Temporal Scheduler", ) - parser.add_argument("--client-cert", default=settings.TEMPORAL_CLIENT_CERT, help="Optional client cert") - parser.add_argument("--client-key", default=settings.TEMPORAL_CLIENT_KEY, help="Optional client key") parser.add_argument( - "--max-attempts", default=settings.TEMPORAL_WORKFLOW_MAX_ATTEMPTS, help="Number of max attempts" + "--temporal-port", + default=settings.TEMPORAL_PORT, + help="Port for Temporal Scheduler", + ) + parser.add_argument( + "--namespace", + default=settings.TEMPORAL_NAMESPACE, + help="Namespace to connect to", + ) + parser.add_argument( + "--task-queue", + default=settings.TEMPORAL_TASK_QUEUE, + help="Task queue to service", + ) + parser.add_argument( + "--server-root-ca-cert", + default=settings.TEMPORAL_CLIENT_ROOT_CA, + help="Optional root server CA cert", + ) + parser.add_argument( + "--client-cert", + default=settings.TEMPORAL_CLIENT_CERT, + help="Optional client cert", + ) + parser.add_argument( + "--client-key", + default=settings.TEMPORAL_CLIENT_KEY, + help="Optional client key", + ) + parser.add_argument( + "--max-attempts", + default=settings.TEMPORAL_WORKFLOW_MAX_ATTEMPTS, + help="Number of max attempts", ) def handle(self, 
*args, **options): diff --git a/posthog/management/commands/generate_demo_data.py b/posthog/management/commands/generate_demo_data.py index 3948813b854ad..2a6e27c992345 100644 --- a/posthog/management/commands/generate_demo_data.py +++ b/posthog/management/commands/generate_demo_data.py @@ -20,7 +20,9 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--seed", type=str, help="Simulation seed for deterministic output") parser.add_argument( - "--now", type=dt.datetime.fromisoformat, help="Simulation 'now' datetime in ISO format (default: now)" + "--now", + type=dt.datetime.fromisoformat, + help="Simulation 'now' datetime in ISO format (default: now)", ) parser.add_argument( "--days-past", @@ -34,7 +36,12 @@ def add_arguments(self, parser): default=30, help="At how many days after 'now' should the simulation end (default: 30)", ) - parser.add_argument("--n-clusters", type=int, default=500, help="Number of clusters (default: 500)") + parser.add_argument( + "--n-clusters", + type=int, + default=500, + help="Number of clusters (default: 500)", + ) parser.add_argument("--dry-run", action="store_true", help="Don't save simulation results") parser.add_argument( "--team-id", @@ -43,10 +50,16 @@ def add_arguments(self, parser): help="If specified, an existing project with this ID will be used, and no new user will be created. If the ID is 0, data will be generated for the master project (but insights etc. 
won't be created)", ) parser.add_argument( - "--email", type=str, default="test@posthog.com", help="Email of the demo user (default: test@posthog.com)" + "--email", + type=str, + default="test@posthog.com", + help="Email of the demo user (default: test@posthog.com)", ) parser.add_argument( - "--password", type=str, default="12345678", help="Password of the demo user (default: 12345678)" + "--password", + type=str, + default="12345678", + help="Password of the demo user (default: 12345678)", ) def handle(self, *args, **options): @@ -74,7 +87,12 @@ def handle(self, *args, **options): ) print("Running simulation...") matrix.simulate() - self.print_results(matrix, seed=seed, duration=monotonic() - timer, verbosity=options["verbosity"]) + self.print_results( + matrix, + seed=seed, + duration=monotonic() - timer, + verbosity=options["verbosity"], + ) if not options["dry_run"]: email = options["email"] password = options["password"] @@ -89,7 +107,11 @@ def handle(self, *args, **options): matrix_manager.run_on_team(team, existing_user) else: matrix_manager.ensure_account_and_save( - email, "Employee 427", "Hedgebox Inc.", password=password, disallow_collision=True + email, + "Employee 427", + "Hedgebox Inc.", + password=password, + disallow_collision=True, ) except exceptions.ValidationError as e: print(f"Error: {e}") diff --git a/posthog/management/commands/makemigrations.py b/posthog/management/commands/makemigrations.py index 3ab70d9bc0800..8ff0a37bfaa34 100644 --- a/posthog/management/commands/makemigrations.py +++ b/posthog/management/commands/makemigrations.py @@ -1,6 +1,8 @@ """Cause git to detect a merge conflict when two branches have migrations.""" -from django.core.management.commands.makemigrations import Command as MakeMigrationsCommand +from django.core.management.commands.makemigrations import ( + Command as MakeMigrationsCommand, +) from django.db.migrations.loader import MigrationLoader diff --git a/posthog/management/commands/migrate_clickhouse.py 
b/posthog/management/commands/migrate_clickhouse.py index 82da287a1743d..b9a4d31eea3d9 100644 --- a/posthog/management/commands/migrate_clickhouse.py +++ b/posthog/management/commands/migrate_clickhouse.py @@ -6,7 +6,12 @@ from infi.clickhouse_orm.migrations import MigrationHistory from infi.clickhouse_orm.utils import import_submodules -from posthog.settings import CLICKHOUSE_DATABASE, CLICKHOUSE_HTTP_URL, CLICKHOUSE_PASSWORD, CLICKHOUSE_USER +from posthog.settings import ( + CLICKHOUSE_DATABASE, + CLICKHOUSE_HTTP_URL, + CLICKHOUSE_PASSWORD, + CLICKHOUSE_USER, +) from posthog.settings.data_stores import CLICKHOUSE_CLUSTER MIGRATIONS_PACKAGE_NAME = "posthog.clickhouse.migrations" @@ -17,14 +22,25 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - "--upto", default=99_999, type=int, help="Database state will be brought to the state after that migration." + "--upto", + default=99_999, + type=int, + help="Database state will be brought to the state after that migration.", ) - parser.add_argument("--fake", action="store_true", help="Mark migrations as run without actually running them.") parser.add_argument( - "--check", action="store_true", help="Exits with a non-zero status if unapplied migrations exist." + "--fake", + action="store_true", + help="Mark migrations as run without actually running them.", ) parser.add_argument( - "--plan", action="store_true", help="Shows a list of the migration actions that will be performed." 
+ "--check", + action="store_true", + help="Exits with a non-zero status if unapplied migrations exist.", + ) + parser.add_argument( + "--plan", + action="store_true", + help="Shows a list of the migration actions that will be performed.", ) parser.add_argument( "--print-sql", diff --git a/posthog/management/commands/notify_helm_install.py b/posthog/management/commands/notify_helm_install.py index ce0b5c3b333f1..684261cdae418 100644 --- a/posthog/management/commands/notify_helm_install.py +++ b/posthog/management/commands/notify_helm_install.py @@ -25,5 +25,10 @@ def handle(self, *args, **options): posthoganalytics.api_key = "sTMFPsFhdP1Ssg" disabled = posthoganalytics.disabled posthoganalytics.disabled = False - posthoganalytics.capture(get_machine_id(), "helm_install", report, groups={"instance": settings.SITE_URL}) + posthoganalytics.capture( + get_machine_id(), + "helm_install", + report, + groups={"instance": settings.SITE_URL}, + ) posthoganalytics.disabled = disabled diff --git a/posthog/management/commands/partition.py b/posthog/management/commands/partition.py index 68a24aef0efc0..b17e958b0c1e1 100644 --- a/posthog/management/commands/partition.py +++ b/posthog/management/commands/partition.py @@ -18,7 +18,6 @@ def add_arguments(self, parser): parser.add_argument("--reverse", action="store_true", help="unpartition event table") def handle(self, *args, **options): - if options["reverse"]: print("Reversing partitions...") with connection.cursor() as cursor: diff --git a/posthog/management/commands/plugin_server_load_test.py b/posthog/management/commands/plugin_server_load_test.py index 3fe197c154393..4adfe8941e644 100644 --- a/posthog/management/commands/plugin_server_load_test.py +++ b/posthog/management/commands/plugin_server_load_test.py @@ -32,7 +32,9 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--seed", type=str, help="Simulation seed for deterministic output") parser.add_argument( - "--now", 
type=dt.datetime.fromisoformat, help="Simulation 'now' datetime in ISO format (default: now)" + "--now", + type=dt.datetime.fromisoformat, + help="Simulation 'now' datetime in ISO format (default: now)", ) parser.add_argument( "--days-past", @@ -46,9 +48,17 @@ def add_arguments(self, parser): default=30, help="At how many days after 'now' should the simulation end (default: 30)", ) - parser.add_argument("--n-clusters", type=int, default=500, help="Number of clusters (default: 500)") parser.add_argument( - "--team-id", type=str, default="1", help="The team to which the events should be associated." + "--n-clusters", + type=int, + default=500, + help="Number of clusters (default: 500)", + ) + parser.add_argument( + "--team-id", + type=str, + default="1", + help="The team to which the events should be associated.", ) def handle(self, *args, **options): @@ -83,7 +93,8 @@ def handle(self, *args, **options): # Make sure events are ordered by time to simulate how they would be # ingested in real life. 
ordered_events = sorted( - chain.from_iterable(person.all_events for person in matrix.people), key=lambda e: e.timestamp + chain.from_iterable(person.all_events for person in matrix.people), + key=lambda e: e.timestamp, ) start_time = time.monotonic() @@ -107,7 +118,11 @@ def handle(self, *args, **options): offsets = admin.list_consumer_group_offsets(group_id="clickhouse-ingestion") end_offsets = consumer.end_offsets([TopicPartition(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0)]) if end_offsets is None: - logger.error("no_end_offsets", topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0) + logger.error( + "no_end_offsets", + topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, + partition=0, + ) sys.exit(1) end_offset = end_offsets[TopicPartition(topic=KAFKA_EVENTS_PLUGIN_INGESTION_TOPIC, partition=0)] diff --git a/posthog/management/commands/run_async_migrations.py b/posthog/management/commands/run_async_migrations.py index e0b9cfef5cb20..611c6038fd43b 100644 --- a/posthog/management/commands/run_async_migrations.py +++ b/posthog/management/commands/run_async_migrations.py @@ -6,8 +6,16 @@ from django.core.management.base import BaseCommand from semantic_version.base import Version -from posthog.async_migrations.runner import complete_migration, is_migration_dependency_fulfilled, start_async_migration -from posthog.async_migrations.setup import ALL_ASYNC_MIGRATIONS, setup_async_migrations, setup_model +from posthog.async_migrations.runner import ( + complete_migration, + is_migration_dependency_fulfilled, + start_async_migration, +) +from posthog.async_migrations.setup import ( + ALL_ASYNC_MIGRATIONS, + setup_async_migrations, + setup_model, +) from posthog.constants import FROZEN_POSTHOG_VERSION from posthog.models.async_migration import ( AsyncMigration, @@ -41,7 +49,9 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - "--check", action="store_true", help="Exits with a non-zero status if required unapplied migrations 
exist." + "--check", + action="store_true", + help="Exits with a non-zero status if required unapplied migrations exist.", ) parser.add_argument( "--plan", diff --git a/posthog/management/commands/send_usage_report.py b/posthog/management/commands/send_usage_report.py index 03e4b4a102da4..cfcd7c8758516 100644 --- a/posthog/management/commands/send_usage_report.py +++ b/posthog/management/commands/send_usage_report.py @@ -9,11 +9,21 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--dry-run", type=bool, help="Print information instead of sending it") parser.add_argument("--date", type=str, help="The date to be ran in format YYYY-MM-DD") - parser.add_argument("--event-name", type=str, help="Override the event name to be sent - for testing") parser.add_argument( - "--skip-capture-event", type=str, help="Skip the posthog capture events - for retrying to billing service" + "--event-name", + type=str, + help="Override the event name to be sent - for testing", + ) + parser.add_argument( + "--skip-capture-event", + type=str, + help="Skip the posthog capture events - for retrying to billing service", + ) + parser.add_argument( + "--organization-id", + type=str, + help="Only send the report for this organization ID", ) - parser.add_argument("--organization-id", type=str, help="Only send the report for this organization ID") parser.add_argument("--async", type=bool, help="Run the task asynchronously") def handle(self, *args, **options): @@ -26,11 +36,19 @@ def handle(self, *args, **options): if run_async: send_all_org_usage_reports.delay( - dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id + dry_run, + date, + event_name, + skip_capture_event=skip_capture_event, + only_organization_id=organization_id, ) else: send_all_org_usage_reports( - dry_run, date, event_name, skip_capture_event=skip_capture_event, only_organization_id=organization_id + dry_run, + date, + event_name, + 
skip_capture_event=skip_capture_event, + only_organization_id=organization_id, ) if dry_run: diff --git a/posthog/management/commands/setup_dev.py b/posthog/management/commands/setup_dev.py index 09281d2b6c39f..42d6d33be512f 100644 --- a/posthog/management/commands/setup_dev.py +++ b/posthog/management/commands/setup_dev.py @@ -2,7 +2,15 @@ from django.db import transaction from posthog.demo.legacy import ORGANIZATION_NAME, TEAM_NAME, create_demo_data -from posthog.models import EventProperty, PersonalAPIKey, Plugin, PluginConfig, PluginSourceFile, Team, User +from posthog.models import ( + EventProperty, + PersonalAPIKey, + Plugin, + PluginConfig, + PluginSourceFile, + Team, + User, +) from posthog.models.event_definition import EventDefinition from posthog.models.personal_api_key import hash_key_value from posthog.models.property_definition import PropertyDefinition @@ -13,7 +21,11 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--no-data", action="store_true", help="Create demo account without data") - parser.add_argument("--create-e2e-test-plugin", action="store_true", help="Create plugin for charts E2E test") + parser.add_argument( + "--create-e2e-test-plugin", + action="store_true", + help="Create plugin for charts E2E test", + ) def handle(self, *args, **options): print("\n⚠️ setup_dev is deprecated. 
Use the more robust generate_demo_data command instead.\n") # noqa T201 @@ -43,7 +55,9 @@ def handle(self, *args, **options): PropertyDefinition.objects.create(name="is_demo", type=PropertyDefinition.Type.PERSON, team=team) PersonalAPIKey.objects.create( - user=user, label="e2e_demo_api_key key", secure_value=hash_key_value("e2e_demo_api_key") + user=user, + label="e2e_demo_api_key key", + secure_value=hash_key_value("e2e_demo_api_key"), ) if not options["no_data"]: create_demo_data(team) @@ -62,7 +76,9 @@ def create_plugin(self, team): plugin_config = PluginConfig.objects.create(plugin=plugin, team=team, order=1, config={}) PluginSourceFile.objects.update_or_create( - plugin=plugin, filename="plugin.json", source='{ "name": "e2e test plugin", "config": [] }' + plugin=plugin, + filename="plugin.json", + source='{ "name": "e2e test plugin", "config": [] }', ) PluginSourceFile.objects.update_or_create( plugin=plugin, diff --git a/posthog/management/commands/split_person.py b/posthog/management/commands/split_person.py index a7b52be1bc786..f32804b14b33c 100644 --- a/posthog/management/commands/split_person.py +++ b/posthog/management/commands/split_person.py @@ -18,10 +18,18 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("--team-id", default=None, type=int, help="Specify a team to fix data for.") - parser.add_argument("--person-id", default=None, type=int, help="Specify the person ID to split.") + parser.add_argument( + "--person-id", + default=None, + type=int, + help="Specify the person ID to split.", + ) parser.add_argument("--live-run", action="store_true", help="Run changes, default is dry-run") parser.add_argument( - "--max-splits", default=None, type=int, help="Only split off a given number of distinct_ids and exit." 
+ "--max-splits", + default=None, + type=int, + help="Only split off a given number of distinct_ids and exit.", ) def handle(self, *args, **options): diff --git a/posthog/management/commands/start_temporal_worker.py b/posthog/management/commands/start_temporal_worker.py index 0ea2feea50f85..6e10a28b31b7d 100644 --- a/posthog/management/commands/start_temporal_worker.py +++ b/posthog/management/commands/start_temporal_worker.py @@ -14,15 +14,41 @@ class Command(BaseCommand): help = "Start Temporal Python Django-aware Worker" def add_arguments(self, parser): - parser.add_argument("--temporal_host", default=settings.TEMPORAL_HOST, help="Hostname for Temporal Scheduler") - parser.add_argument("--temporal_port", default=settings.TEMPORAL_PORT, help="Port for Temporal Scheduler") - parser.add_argument("--namespace", default=settings.TEMPORAL_NAMESPACE, help="Namespace to connect to") - parser.add_argument("--task-queue", default=settings.TEMPORAL_TASK_QUEUE, help="Task queue to service") parser.add_argument( - "--server-root-ca-cert", default=settings.TEMPORAL_CLIENT_ROOT_CA, help="Optional root server CA cert" + "--temporal_host", + default=settings.TEMPORAL_HOST, + help="Hostname for Temporal Scheduler", + ) + parser.add_argument( + "--temporal_port", + default=settings.TEMPORAL_PORT, + help="Port for Temporal Scheduler", + ) + parser.add_argument( + "--namespace", + default=settings.TEMPORAL_NAMESPACE, + help="Namespace to connect to", + ) + parser.add_argument( + "--task-queue", + default=settings.TEMPORAL_TASK_QUEUE, + help="Task queue to service", + ) + parser.add_argument( + "--server-root-ca-cert", + default=settings.TEMPORAL_CLIENT_ROOT_CA, + help="Optional root server CA cert", + ) + parser.add_argument( + "--client-cert", + default=settings.TEMPORAL_CLIENT_CERT, + help="Optional client cert", + ) + parser.add_argument( + "--client-key", + default=settings.TEMPORAL_CLIENT_KEY, + help="Optional client key", ) - parser.add_argument("--client-cert", 
default=settings.TEMPORAL_CLIENT_CERT, help="Optional client cert") - parser.add_argument("--client-key", default=settings.TEMPORAL_CLIENT_KEY, help="Optional client key") def handle(self, *args, **options): temporal_host = options["temporal_host"] diff --git a/posthog/management/commands/sync_available_features.py b/posthog/management/commands/sync_available_features.py index 516e1eed78490..841cf210cffdf 100644 --- a/posthog/management/commands/sync_available_features.py +++ b/posthog/management/commands/sync_available_features.py @@ -3,7 +3,9 @@ import structlog from django.core.management.base import BaseCommand -from posthog.tasks.sync_all_organization_available_features import sync_all_organization_available_features +from posthog.tasks.sync_all_organization_available_features import ( + sync_all_organization_available_features, +) logger = structlog.get_logger(__name__) logger.setLevel(logging.INFO) diff --git a/posthog/management/commands/sync_feature_flags.py b/posthog/management/commands/sync_feature_flags.py index 2459d7f2c80c9..186316bb6a2df 100644 --- a/posthog/management/commands/sync_feature_flags.py +++ b/posthog/management/commands/sync_feature_flags.py @@ -56,14 +56,26 @@ def handle(self, *args, **options): "groups": [{"properties": [], "rollout_percentage": None}], "multivariate": { "variants": [ - {"key": "control", "name": "Control", "rollout_percentage": 0}, - {"key": "test", "name": "Test", "rollout_percentage": 100}, + { + "key": "control", + "name": "Control", + "rollout_percentage": 0, + }, + { + "key": "test", + "name": "Test", + "rollout_percentage": 100, + }, ] }, }, ) else: FeatureFlag.objects.create( - team=team, rollout_percentage=100, name=flag, key=flag, created_by=first_user + team=team, + rollout_percentage=100, + name=flag, + key=flag, + created_by=first_user, ) print(f"Created feature flag '{flag} for team {team.id} {' - ' + team.name if team.name else ''}") diff --git a/posthog/management/commands/sync_persons_to_clickhouse.py 
b/posthog/management/commands/sync_persons_to_clickhouse.py index 9e3af26deb3b5..6bf7639fcfa33 100644 --- a/posthog/management/commands/sync_persons_to_clickhouse.py +++ b/posthog/management/commands/sync_persons_to_clickhouse.py @@ -36,7 +36,9 @@ def add_arguments(self, parser): parser.add_argument("--person-override", action="store_true", help="Sync person overrides") parser.add_argument("--group", action="store_true", help="Sync groups") parser.add_argument( - "--deletes", action="store_true", help="process deletes for data in ClickHouse but not Postgres" + "--deletes", + action="store_true", + help="process deletes for data in ClickHouse but not Postgres", ) parser.add_argument("--live-run", action="store_true", help="Run changes, default is dry-run") diff --git a/posthog/management/commands/sync_replicated_schema.py b/posthog/management/commands/sync_replicated_schema.py index 35b73e2808378..40d4ab8d32ca5 100644 --- a/posthog/management/commands/sync_replicated_schema.py +++ b/posthog/management/commands/sync_replicated_schema.py @@ -24,7 +24,9 @@ class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( - "--dry-run", action="store_true", help="Exits with a non-zero status if schema changes would be required." 
+ "--dry-run", + action="store_true", + help="Exits with a non-zero status if schema changes would be required.", ) def handle(self, *args, **options): @@ -35,7 +37,10 @@ def handle(self, *args, **options): _, create_table_queries, out_of_sync_hosts = self.analyze_cluster_tables() if len(out_of_sync_hosts) > 0: - logger.info("Schema out of sync on some clickhouse nodes!", out_of_sync_hosts=out_of_sync_hosts) + logger.info( + "Schema out of sync on some clickhouse nodes!", + out_of_sync_hosts=out_of_sync_hosts, + ) if options.get("dry_run"): exit(1) @@ -81,7 +86,9 @@ def get_out_of_sync_hosts(self, host_tables: Dict[HostName, Set[TableName]]) -> return out_of_sync def create_missing_tables( - self, out_of_sync_hosts: Dict[HostName, Set[TableName]], create_table_queries: Dict[TableName, Query] + self, + out_of_sync_hosts: Dict[HostName, Set[TableName]], + create_table_queries: Dict[TableName, Query], ): missing_tables = set(table for tables in out_of_sync_hosts.values() for table in tables) @@ -95,5 +102,5 @@ def run_on_cluster(self, create_table_query: Query) -> Query: r"^CREATE TABLE (\S+)", f"CREATE TABLE IF NOT EXISTS \\1 ON CLUSTER '{settings.CLICKHOUSE_CLUSTER}'", create_table_query, - 1, + count=1, ) diff --git a/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py b/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py index d67b74e1ab466..3f410beef2372 100644 --- a/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py +++ b/posthog/management/commands/test/test_backfill_persons_and_groups_on_events.py @@ -6,7 +6,9 @@ from posthog.client import sync_execute from posthog.conftest import create_clickhouse_tables -from posthog.management.commands.backfill_persons_and_groups_on_events import run_backfill +from posthog.management.commands.backfill_persons_and_groups_on_events import ( + run_backfill, +) from posthog.models.event.sql import EVENTS_DATA_TABLE from posthog.test.base import 
BaseTest, ClickhouseTestMixin @@ -73,7 +75,11 @@ def test_person_backfill(self): events_after = sync_execute("select event, person_id, person_properties from events") self.assertEqual( - events_after, [("event1", person_id, '{ "foo": "bar" }'), ("event2", person_id, '{ "foo": "bar" }')] + events_after, + [ + ("event1", person_id, '{ "foo": "bar" }'), + ("event2", person_id, '{ "foo": "bar" }'), + ], ) def test_groups_backfill(self): @@ -99,4 +105,7 @@ def test_groups_backfill(self): sleep(10) events_after = sync_execute("select event, $group_0, group0_properties from events") - self.assertEqual(events_after, [("event1", "my_group", group_props), ("event2", "my_group", group_props)]) + self.assertEqual( + events_after, + [("event1", "my_group", group_props), ("event2", "my_group", group_props)], + ) diff --git a/posthog/management/commands/test/test_create_batch_export_from_app.py b/posthog/management/commands/test/test_create_batch_export_from_app.py index fb216dc4f2bb4..aabe5ad511c99 100644 --- a/posthog/management/commands/test/test_create_batch_export_from_app.py +++ b/posthog/management/commands/test/test_create_batch_export_from_app.py @@ -92,7 +92,11 @@ def config(request): @pytest.fixture def snowflake_plugin_config(snowflake_plugin, team) -> typing.Generator[PluginConfig, None, None]: plugin_config = PluginConfig.objects.create( - plugin=snowflake_plugin, order=1, team=team, enabled=True, config=test_snowflake_config + plugin=snowflake_plugin, + order=1, + team=team, + enabled=True, + config=test_snowflake_config, ) yield plugin_config plugin_config.delete() diff --git a/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py b/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py index 2698af803934c..954dac77d9c06 100644 --- a/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py +++ b/posthog/management/commands/test/test_fix_person_distinct_ids_after_delete.py @@ -24,7 +24,11 @@ class 
TestFixPersonDistinctIdsAfterDelete(BaseTest, ClickhouseTestMixin): def test_dry_run(self, mocked_ch_call): # clickhouse only deleted person and distinct id that should be updated ch_only_deleted_person_uuid = create_person( - uuid=str(uuid4()), team_id=self.team.pk, is_deleted=True, version=5, sync=True + uuid=str(uuid4()), + team_id=self.team.pk, + is_deleted=True, + version=5, + sync=True, ) create_person_distinct_id( team_id=self.team.pk, @@ -39,7 +43,10 @@ def test_dry_run(self, mocked_ch_call): team_id=self.team.pk, properties={"abcdefg": 11112}, version=1, uuid=uuid4() ) PersonDistinctId.objects.create( - team=self.team, person=person_linked_to_after, distinct_id="distinct_id", version=0 + team=self.team, + person=person_linked_to_after, + distinct_id="distinct_id", + version=0, ) options = {"live_run": False, "team_id": self.team.pk, "new_version": 2500} run(options, True) @@ -61,7 +68,13 @@ def test_dry_run(self, mocked_ch_call): self.assertEqual( ch_person_distinct_ids, [ - (UUID(ch_only_deleted_person_uuid), self.team.pk, "distinct_id", 7, True), + ( + UUID(ch_only_deleted_person_uuid), + self.team.pk, + "distinct_id", + 7, + True, + ), ], ) mocked_ch_call.assert_not_called() @@ -73,7 +86,11 @@ def test_dry_run(self, mocked_ch_call): def test_live_run(self, mocked_ch_call): # clickhouse only deleted person and distinct id that should be updated ch_only_deleted_person_uuid = create_person( - uuid=str(uuid4()), team_id=self.team.pk, is_deleted=True, version=5, sync=True + uuid=str(uuid4()), + team_id=self.team.pk, + is_deleted=True, + version=5, + sync=True, ) create_person_distinct_id( team_id=self.team.pk, @@ -96,10 +113,16 @@ def test_live_run(self, mocked_ch_call): team_id=self.team.pk, properties={"abcdefg": 11112}, version=1, uuid=uuid4() ) PersonDistinctId.objects.create( - team=self.team, person=person_linked_to_after, distinct_id="distinct_id", version=0 + team=self.team, + person=person_linked_to_after, + distinct_id="distinct_id", + version=0, ) 
PersonDistinctId.objects.create( - team=self.team, person=person_linked_to_after, distinct_id="distinct_id-2", version=0 + team=self.team, + person=person_linked_to_after, + distinct_id="distinct_id-2", + version=0, ) options = {"live_run": True, "team_id": self.team.pk, "new_version": 2500} run(options, True) @@ -110,7 +133,8 @@ def test_live_run(self, mocked_ch_call): self.assertEqual(pg_distinct_ids[0].version, 2500) self.assertEqual(pg_distinct_ids[1].version, 2500) self.assertEqual( - {pg_distinct_ids[0].distinct_id, pg_distinct_ids[1].distinct_id}, {"distinct_id", "distinct_id-2"} + {pg_distinct_ids[0].distinct_id, pg_distinct_ids[1].distinct_id}, + {"distinct_id", "distinct_id-2"}, ) self.assertEqual(pg_distinct_ids[0].person.uuid, person_linked_to_after.uuid) self.assertEqual(pg_distinct_ids[1].person.uuid, person_linked_to_after.uuid) @@ -126,7 +150,13 @@ def test_live_run(self, mocked_ch_call): ch_person_distinct_ids, [ (person_linked_to_after.uuid, self.team.pk, "distinct_id", 2500, False), - (person_linked_to_after.uuid, self.team.pk, "distinct_id-2", 2500, False), + ( + person_linked_to_after.uuid, + self.team.pk, + "distinct_id-2", + 2500, + False, + ), ], ) self.assertEqual(mocked_ch_call.call_count, 2) @@ -145,7 +175,10 @@ def test_no_op(self, mocked_ch_call): # distinct id no update PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_1, distinct_id="distinct_id-1", version=0 + team=self.team, + person=person_not_changed_1, + distinct_id="distinct_id-1", + version=0, ) # deleted person not re-used @@ -153,7 +186,10 @@ def test_no_op(self, mocked_ch_call): team_id=self.team.pk, properties={"abcdef": 1111}, version=0, uuid=uuid4() ) PersonDistinctId.objects.create( - team=self.team, person=person_deleted_1, distinct_id="distinct_id-del-1", version=16 + team=self.team, + person=person_deleted_1, + distinct_id="distinct_id-del-1", + version=16, ) person_deleted_1.delete() diff --git 
a/posthog/management/commands/test/test_migrate_kafka_data.py b/posthog/management/commands/test/test_migrate_kafka_data.py index 0053c7201b876..05bf9f0c47c3e 100644 --- a/posthog/management/commands/test/test_migrate_kafka_data.py +++ b/posthog/management/commands/test/test_migrate_kafka_data.py @@ -34,7 +34,12 @@ def test_can_migrate_data_from_one_topic_to_another_on_a_different_cluster(): _create_topic(new_events_topic) # Put some data to the old topic - _send_message(old_events_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + old_events_topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) migrate_kafka_data( "--from-topic", @@ -95,7 +100,12 @@ def test_we_do_not_migrate_when_dry_run_is_set(): _create_topic(new_events_topic) # Put some data to the old topic - _send_message(old_events_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + old_events_topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) migrate_kafka_data( "--from-topic", @@ -128,7 +138,12 @@ def test_cannot_send_data_back_into_same_topic_on_same_cluster(): _commit_offsets_for_topic(topic, consumer_group_id) # Put some data to the topic - _send_message(topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) try: migrate_kafka_data( @@ -161,7 +176,12 @@ def test_that_the_command_fails_if_the_specified_consumer_group_does_not_exist() _create_topic(new_topic) # Put some data to the topic - _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + old_topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) try: 
migrate_kafka_data( @@ -195,7 +215,12 @@ def test_that_we_error_if_the_target_topic_doesnt_exist(): _commit_offsets_for_topic(old_topic, consumer_group_id) # Put some data to the topic - _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + old_topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) try: migrate_kafka_data( @@ -231,7 +256,12 @@ def test_we_fail_on_send_errors_to_new_topic(): _commit_offsets_for_topic(old_topic, consumer_group_id) # Put some data to the topic - _send_message(old_topic, b'{ "event": "test" }', key=message_key.encode("utf-8"), headers=[("foo", b"bar")]) + _send_message( + old_topic, + b'{ "event": "test" }', + key=message_key.encode("utf-8"), + headers=[("foo", b"bar")], + ) with mock.patch("kafka.KafkaProducer.send") as mock_send: produce_future = FutureProduceResult(topic_partition=TopicPartition(new_topic, 1)) diff --git a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py index 56e956d04f73b..acde0c4630f19 100644 --- a/posthog/management/commands/test/test_sync_persons_to_clickhouse.py +++ b/posthog/management/commands/test/test_sync_persons_to_clickhouse.py @@ -29,7 +29,11 @@ class TestSyncPersonsToClickHouse(BaseTest, ClickhouseTestMixin): def test_persons_sync(self): with mute_selected_signals(): # without creating/updating in clickhouse person = Person.objects.create( - team_id=self.team.pk, properties={"a": 1234}, is_identified=True, version=4, uuid=uuid4() + team_id=self.team.pk, + properties={"a": 1234}, + is_identified=True, + version=4, + uuid=uuid4(), ) run_person_sync(self.team.pk, live_run=True, deletes=False, sync=True) @@ -45,7 +49,11 @@ def test_persons_sync(self): def test_persons_sync_with_null_version(self): with mute_selected_signals(): # without creating/updating in clickhouse person = Person.objects.create( 
- team_id=self.team.pk, properties={"a": 1234}, is_identified=True, version=None, uuid=uuid4() + team_id=self.team.pk, + properties={"a": 1234}, + is_identified=True, + version=None, + uuid=uuid4(), ) run_person_sync(self.team.pk, live_run=True, deletes=False, sync=True) @@ -59,7 +67,13 @@ def test_persons_sync_with_null_version(self): self.assertEqual(ch_persons, [(person.uuid, self.team.pk, '{"a": 1234}', True, 0, False)]) def test_persons_deleted(self): - uuid = create_person(uuid=str(uuid4()), team_id=self.team.pk, version=5, properties={"abc": 123}, sync=True) + uuid = create_person( + uuid=str(uuid4()), + team_id=self.team.pk, + version=5, + properties={"abc": 123}, + sync=True, + ) run_person_sync(self.team.pk, live_run=True, deletes=True, sync=True) @@ -104,7 +118,12 @@ def test_distinct_ids_sync_with_null_version(self): def test_distinct_ids_deleted(self): uuid = uuid4() create_person_distinct_id( - team_id=self.team.pk, distinct_id="test-id-7", person_id=str(uuid), is_deleted=False, version=7, sync=True + team_id=self.team.pk, + distinct_id="test-id-7", + person_id=str(uuid), + is_deleted=False, + version=7, + sync=True, ) run_distinct_id_sync(self.team.pk, live_run=True, deletes=True, sync=True) @@ -114,7 +133,10 @@ def test_distinct_ids_deleted(self): """, {"team_id": self.team.pk}, ) - self.assertEqual(ch_person_distinct_ids, [(UUID(int=0), self.team.pk, "test-id-7", 107, True)]) + self.assertEqual( + ch_person_distinct_ids, + [(UUID(int=0), self.team.pk, "test-id-7", 107, True)], + ) @mock.patch( f"{posthog.management.commands.sync_persons_to_clickhouse.__name__}.raw_create_group_ch", @@ -156,7 +178,13 @@ def test_group_sync(self, mocked_ch_call): wraps=posthog.management.commands.sync_persons_to_clickhouse.raw_create_group_ch, ) def test_group_sync_updates_group(self, mocked_ch_call): - group = create_group(self.team.pk, 2, "group-key", {"a": 5}, timestamp=datetime.utcnow() - timedelta(hours=3)) + group = create_group( + self.team.pk, + 2, + 
"group-key", + {"a": 5}, + timestamp=datetime.utcnow() - timedelta(hours=3), + ) group.group_properties = {"a": 5, "b": 3} group.save() @@ -175,9 +203,18 @@ def test_group_sync_updates_group(self, mocked_ch_call): self.assertEqual(ch_group[0], 2) self.assertEqual(ch_group[1], "group-key") self.assertEqual(ch_group[2], '{"a": 5, "b": 3}') - self.assertEqual(ch_group[3].strftime("%Y-%m-%d %H:%M:%S"), group.created_at.strftime("%Y-%m-%d %H:%M:%S")) - self.assertGreaterEqual(ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), ts_before.strftime("%Y-%m-%d %H:%M:%S")) - self.assertLessEqual(ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S")) + self.assertEqual( + ch_group[3].strftime("%Y-%m-%d %H:%M:%S"), + group.created_at.strftime("%Y-%m-%d %H:%M:%S"), + ) + self.assertGreaterEqual( + ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), + ts_before.strftime("%Y-%m-%d %H:%M:%S"), + ) + self.assertLessEqual( + ch_group[4].strftime("%Y-%m-%d %H:%M:%S"), + datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), + ) # second time it's a no-op run_group_sync(self.team.pk, live_run=True, sync=True) @@ -256,15 +293,24 @@ def everything_test_run(self, live_run): # 2 persons who should be created with mute_selected_signals(): # without creating/updating in clickhouse person_should_be_created_1 = Person.objects.create( - team_id=self.team.pk, properties={"abcde": 12553633}, version=2, uuid=uuid4() + team_id=self.team.pk, + properties={"abcde": 12553633}, + version=2, + uuid=uuid4(), ) person_should_be_created_2 = Person.objects.create( - team_id=self.team.pk, properties={"abcdeit34": 12553633}, version=3, uuid=uuid4() + team_id=self.team.pk, + properties={"abcdeit34": 12553633}, + version=3, + uuid=uuid4(), ) # 2 persons who have updates person_should_update_1 = Person.objects.create( - team_id=self.team.pk, properties={"abcde": 12553}, version=5, uuid=uuid4() + team_id=self.team.pk, + properties={"abcde": 12553}, + version=5, + uuid=uuid4(), ) 
person_should_update_2 = Person.objects.create( team_id=self.team.pk, properties={"abc": 125}, version=7, uuid=uuid4() @@ -286,35 +332,61 @@ def everything_test_run(self, live_run): # 2 persons need to be deleted deleted_person_1_uuid = create_person( - uuid=str(uuid4()), team_id=self.team.pk, version=7, properties={"abcd": 123}, sync=True + uuid=str(uuid4()), + team_id=self.team.pk, + version=7, + properties={"abcd": 123}, + sync=True, ) deleted_person_2_uuid = create_person( - uuid=str(uuid4()), team_id=self.team.pk, version=8, properties={"abcef": 123}, sync=True + uuid=str(uuid4()), + team_id=self.team.pk, + version=8, + properties={"abcef": 123}, + sync=True, ) # 2 distinct id no update PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_1, distinct_id="distinct_id", version=0 + team=self.team, + person=person_not_changed_1, + distinct_id="distinct_id", + version=0, ) PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_1, distinct_id="distinct_id-9", version=9 + team=self.team, + person=person_not_changed_1, + distinct_id="distinct_id-9", + version=9, ) # # 2 distinct id to be created with mute_selected_signals(): # without creating/updating in clickhouse PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_1, distinct_id="distinct_id-10", version=10 + team=self.team, + person=person_not_changed_1, + distinct_id="distinct_id-10", + version=10, ) PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_1, distinct_id="distinct_id-11", version=11 + team=self.team, + person=person_not_changed_1, + distinct_id="distinct_id-11", + version=11, ) # 2 distinct id that need to update PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_2, distinct_id="distinct_id-12", version=13 + team=self.team, + person=person_not_changed_2, + distinct_id="distinct_id-12", + version=13, ) PersonDistinctId.objects.create( - team=self.team, person=person_not_changed_2, 
distinct_id="distinct_id-14", version=15 + team=self.team, + person=person_not_changed_2, + distinct_id="distinct_id-14", + version=15, ) create_person_distinct_id( team_id=self.team.pk, @@ -397,23 +469,95 @@ def everything_test_run(self, live_run): self.assertEqual( ch_persons, [ - (person_not_changed_1.uuid, self.team.pk, '{"abcdef": 1111}', False, 0, False), - (person_not_changed_2.uuid, self.team.pk, '{"abcdefg": 11112}', False, 1, False), - (person_should_update_1.uuid, self.team.pk, '{"a": 13}', False, 4, False), - (person_should_update_2.uuid, self.team.pk, '{"a": 1}', False, 6, False), - (UUID(deleted_person_1_uuid), self.team.pk, '{"abcd": 123}', False, 7, False), - (UUID(deleted_person_2_uuid), self.team.pk, '{"abcef": 123}', False, 8, False), + ( + person_not_changed_1.uuid, + self.team.pk, + '{"abcdef": 1111}', + False, + 0, + False, + ), + ( + person_not_changed_2.uuid, + self.team.pk, + '{"abcdefg": 11112}', + False, + 1, + False, + ), + ( + person_should_update_1.uuid, + self.team.pk, + '{"a": 13}', + False, + 4, + False, + ), + ( + person_should_update_2.uuid, + self.team.pk, + '{"a": 1}', + False, + 6, + False, + ), + ( + UUID(deleted_person_1_uuid), + self.team.pk, + '{"abcd": 123}', + False, + 7, + False, + ), + ( + UUID(deleted_person_2_uuid), + self.team.pk, + '{"abcef": 123}', + False, + 8, + False, + ), ], ) self.assertEqual( ch_person_distinct_ids, [ (person_not_changed_1.uuid, self.team.pk, "distinct_id", 0, False), - (person_not_changed_1.uuid, self.team.pk, "distinct_id-9", 9, False), - (person_not_changed_1.uuid, self.team.pk, "distinct_id-12", 12, False), - (person_not_changed_1.uuid, self.team.pk, "distinct_id-14", 14, False), - (deleted_distinct_id_1_uuid, self.team.pk, "distinct_id-17", 17, False), - (deleted_distinct_id_2_uuid, self.team.pk, "distinct_id-18", 18, False), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-9", + 9, + False, + ), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-12", + 12, + 
False, + ), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-14", + 14, + False, + ), + ( + deleted_distinct_id_1_uuid, + self.team.pk, + "distinct_id-17", + 17, + False, + ), + ( + deleted_distinct_id_2_uuid, + self.team.pk, + "distinct_id-18", + 18, + False, + ), ], ) self.assertEqual(len(ch_groups), 0) @@ -421,12 +565,54 @@ def everything_test_run(self, live_run): self.assertEqual( ch_persons, [ - (person_not_changed_1.uuid, self.team.pk, '{"abcdef": 1111}', False, 0, False), - (person_not_changed_2.uuid, self.team.pk, '{"abcdefg": 11112}', False, 1, False), - (person_should_be_created_1.uuid, self.team.pk, '{"abcde": 12553633}', False, 2, False), - (person_should_be_created_2.uuid, self.team.pk, '{"abcdeit34": 12553633}', False, 3, False), - (person_should_update_1.uuid, self.team.pk, '{"abcde": 12553}', False, 5, False), - (person_should_update_2.uuid, self.team.pk, '{"abc": 125}', False, 7, False), + ( + person_not_changed_1.uuid, + self.team.pk, + '{"abcdef": 1111}', + False, + 0, + False, + ), + ( + person_not_changed_2.uuid, + self.team.pk, + '{"abcdefg": 11112}', + False, + 1, + False, + ), + ( + person_should_be_created_1.uuid, + self.team.pk, + '{"abcde": 12553633}', + False, + 2, + False, + ), + ( + person_should_be_created_2.uuid, + self.team.pk, + '{"abcdeit34": 12553633}', + False, + 3, + False, + ), + ( + person_should_update_1.uuid, + self.team.pk, + '{"abcde": 12553}', + False, + 5, + False, + ), + ( + person_should_update_2.uuid, + self.team.pk, + '{"abc": 125}', + False, + 7, + False, + ), (UUID(deleted_person_1_uuid), self.team.pk, "{}", False, 107, True), (UUID(deleted_person_2_uuid), self.team.pk, "{}", False, 108, True), ], @@ -435,11 +621,41 @@ def everything_test_run(self, live_run): ch_person_distinct_ids, [ (person_not_changed_1.uuid, self.team.pk, "distinct_id", 0, False), - (person_not_changed_1.uuid, self.team.pk, "distinct_id-9", 9, False), - (person_not_changed_1.uuid, self.team.pk, "distinct_id-10", 10, False), - 
(person_not_changed_1.uuid, self.team.pk, "distinct_id-11", 11, False), - (person_not_changed_2.uuid, self.team.pk, "distinct_id-12", 13, False), - (person_not_changed_2.uuid, self.team.pk, "distinct_id-14", 15, False), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-9", + 9, + False, + ), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-10", + 10, + False, + ), + ( + person_not_changed_1.uuid, + self.team.pk, + "distinct_id-11", + 11, + False, + ), + ( + person_not_changed_2.uuid, + self.team.pk, + "distinct_id-12", + 13, + False, + ), + ( + person_not_changed_2.uuid, + self.team.pk, + "distinct_id-14", + 15, + False, + ), (UUID(int=0), self.team.pk, "distinct_id-17", 117, True), (UUID(int=0), self.team.pk, "distinct_id-18", 118, True), ], diff --git a/posthog/management/commands/test/test_sync_replicated_schema.py b/posthog/management/commands/test/test_sync_replicated_schema.py index 83ba19901b229..8b51b9259b5c0 100644 --- a/posthog/management/commands/test/test_sync_replicated_schema.py +++ b/posthog/management/commands/test/test_sync_replicated_schema.py @@ -21,7 +21,11 @@ def recreate_database(self, create_tables=True): def test_analyze_test_cluster(self): self.recreate_database(create_tables=True) - host_tables, create_table_queries, out_of_sync_hosts = Command().analyze_cluster_tables() + ( + host_tables, + create_table_queries, + out_of_sync_hosts, + ) = Command().analyze_cluster_tables() self.assertEqual(len(host_tables), 1) self.assertGreater(len(create_table_queries), 0) @@ -34,7 +38,11 @@ def test_analyze_test_cluster(self): def test_analyze_empty_cluster(self): self.recreate_database(create_tables=False) - host_tables, create_table_queries, out_of_sync_hosts = Command().analyze_cluster_tables() + ( + host_tables, + create_table_queries, + out_of_sync_hosts, + ) = Command().analyze_cluster_tables() self.assertEqual(host_tables, {}) self.assertEqual(create_table_queries, {}) diff --git a/posthog/middleware.py 
b/posthog/middleware.py index 406197944996e..b480580afad40 100644 --- a/posthog/middleware.py +++ b/posthog/middleware.py @@ -13,7 +13,11 @@ from django.middleware.csrf import CsrfViewMiddleware from django.urls import resolve from django.utils.cache import add_never_cache_headers -from django_prometheus.middleware import Metrics, PrometheusAfterMiddleware, PrometheusBeforeMiddleware +from django_prometheus.middleware import ( + Metrics, + PrometheusAfterMiddleware, + PrometheusBeforeMiddleware, +) from rest_framework import status from statshog.defaults.django import statsd @@ -26,12 +30,11 @@ from posthog.metrics import LABEL_TEAM_ID from posthog.models import Action, Cohort, Dashboard, FeatureFlag, Insight, Team, User from posthog.rate_limit import DecideRateThrottle -from posthog.settings import SITE_URL +from posthog.settings import SITE_URL, DEBUG from posthog.settings.statsd import STATSD_HOST from posthog.user_permissions import UserPermissions -from .utils_cors import cors_response - from .auth import PersonalAPIKeyAuthentication +from .utils_cors import cors_response ALWAYS_ALLOWED_ENDPOINTS = [ "decide", @@ -45,6 +48,10 @@ "_health", ] +if DEBUG: + # /i/ is the new root path for capture endpoints + ALWAYS_ALLOWED_ENDPOINTS.append("i") + default_cookie_options = { "max_age": 365 * 24 * 60 * 60, # one year "expires": None, @@ -113,6 +120,8 @@ def process_view(self, request, callback, callback_args, callback_kwargs): # if super().process_view did not find a valid CSRF token, try looking for a personal API key if result is not None and PersonalAPIKeyAuthentication.find_key_with_source(request) is not None: return self._accept(request) + if DEBUG and request.path.split("/")[1] in ALWAYS_ALLOWED_ENDPOINTS: + return self._accept(request) return result def _accept(self, request): @@ -228,7 +237,10 @@ def __call__(self, request: HttpRequest): response: HttpResponse = self.get_response(request) if "api/" in request.path and "capture" not in request.path: - 
statsd.incr("http_api_request_response", tags={"id": route_id, "status_code": response.status_code}) + statsd.incr( + "http_api_request_response", + tags={"id": route_id, "status_code": response.status_code}, + ) return response finally: @@ -243,7 +255,13 @@ def _get_param(self, request: HttpRequest, name: str): class QueryTimeCountingMiddleware: - ALLOW_LIST_ROUTES = ["dashboard", "insight", "property_definitions", "properties", "person"] + ALLOW_LIST_ROUTES = [ + "dashboard", + "insight", + "property_definitions", + "properties", + "person", + ] def __init__(self, get_response): self.get_response = get_response @@ -286,7 +304,8 @@ class ShortCircuitMiddleware: def __init__(self, get_response): self.get_response = get_response self.decide_throttler = DecideRateThrottle( - replenish_rate=settings.DECIDE_BUCKET_REPLENISH_RATE, bucket_capacity=settings.DECIDE_BUCKET_CAPACITY + replenish_rate=settings.DECIDE_BUCKET_REPLENISH_RATE, + bucket_capacity=settings.DECIDE_BUCKET_CAPACITY, ) def __call__(self, request: HttpRequest): @@ -391,7 +410,12 @@ def __call__(self, request: HttpRequest): resolver_match = resolve(request.path) request.resolver_match = resolver_match for middleware in self.CAPTURE_MIDDLEWARE: - middleware.process_view(request, resolver_match.func, resolver_match.args, resolver_match.kwargs) + middleware.process_view( + request, + resolver_match.func, + resolver_match.args, + resolver_match.kwargs, + ) response: HttpResponse = get_event(request) diff --git a/posthog/migrations/0001_initial.py b/posthog/migrations/0001_initial.py index a196986062dd6..4a4f27e763557 100644 --- a/posthog/migrations/0001_initial.py +++ b/posthog/migrations/0001_initial.py @@ -10,7 +10,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ diff --git a/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py b/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py index cab1b248467f5..d862ca30ecf22 100644 --- 
a/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py +++ b/posthog/migrations/0001_initial_squashed_0284_improved_caching_state_idx.py @@ -324,11 +324,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name="User", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("password", models.CharField(max_length=128, verbose_name="password")), - ("last_login", models.DateTimeField(blank=True, null=True, verbose_name="last login")), - ("first_name", models.CharField(blank=True, max_length=150, verbose_name="first name")), - ("last_name", models.CharField(blank=True, max_length=150, verbose_name="last name")), + ( + "last_login", + models.DateTimeField(blank=True, null=True, verbose_name="last login"), + ), + ( + "first_name", + models.CharField(blank=True, max_length=150, verbose_name="first name"), + ), + ( + "last_name", + models.CharField(blank=True, max_length=150, verbose_name="last name"), + ), ( "is_staff", models.BooleanField( @@ -345,7 +362,10 @@ class Migration(migrations.Migration): verbose_name="active", ), ), - ("date_joined", models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined")), + ( + "date_joined", + models.DateTimeField(default=django.utils.timezone.now, verbose_name="date joined"), + ), # NOTE: to achieve parity with the constraint names from the # unsquashed migration, we need to apply uniqueness separately # as Django appears to have different behaviour in these cases. 
@@ -362,10 +382,22 @@ class Migration(migrations.Migration): "temporary_token", models.CharField(blank=True, max_length=200, null=True), ), # NOTE: we make this unique later - ("distinct_id", models.CharField(blank=True, max_length=200)), # NOTE: we make this unique later - ("email_opt_in", models.BooleanField(blank=True, default=False, null=True)), - ("partial_notification_settings", models.JSONField(blank=True, null=True)), - ("anonymize_data", models.BooleanField(blank=True, default=False, null=True)), + ( + "distinct_id", + models.CharField(blank=True, max_length=200), + ), # NOTE: we make this unique later + ( + "email_opt_in", + models.BooleanField(blank=True, default=False, null=True), + ), + ( + "partial_notification_settings", + models.JSONField(blank=True, null=True), + ), + ( + "anonymize_data", + models.BooleanField(blank=True, default=False, null=True), + ), ( "toolbar_mode", models.CharField( @@ -376,7 +408,10 @@ class Migration(migrations.Migration): null=True, ), ), - ("events_column_config", models.JSONField(default=posthog.models.user.events_column_config_default)), + ( + "events_column_config", + models.JSONField(default=posthog.models.user.events_column_config_default), + ), ], options={ "verbose_name": "user", @@ -414,22 +449,44 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Action", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), ("description", models.TextField(blank=True, default="")), ("created_at", models.DateTimeField(auto_now_add=True)), ("deleted", models.BooleanField(default=False)), ("post_to_slack", models.BooleanField(default=False)), - ("slack_message_format", models.CharField(blank=True, default="", max_length=600)), + ( + "slack_message_format", + 
models.CharField(blank=True, default="", max_length=600), + ), ("is_calculating", models.BooleanField(default=False)), ("updated_at", models.DateTimeField(auto_now=True)), - ("last_calculated_at", models.DateTimeField(blank=True, default=django.utils.timezone.now)), + ( + "last_calculated_at", + models.DateTimeField(blank=True, default=django.utils.timezone.now), + ), ], ), migrations.CreateModel( name="ActionStep", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("tag_name", models.CharField(blank=True, max_length=400, null=True)), ("text", models.CharField(blank=True, max_length=400, null=True)), ("href", models.CharField(blank=True, max_length=65535, null=True)), @@ -439,7 +496,11 @@ class Migration(migrations.Migration): "url_matching", models.CharField( blank=True, - choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")], + choices=[ + ("contains", "contains"), + ("regex", "regex"), + ("exact", "exact"), + ], default="contains", max_length=400, null=True, @@ -456,7 +517,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("team_id", models.PositiveIntegerField(null=True)), @@ -468,7 +532,8 @@ class Migration(migrations.Migration): ( "detail", models.JSONField( - encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, null=True + encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, + null=True, ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), @@ -477,9 +542,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Annotation", fields=[ - ("id", 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("content", models.CharField(blank=True, max_length=400, null=True)), - ("created_at", models.DateTimeField(default=django.utils.timezone.now, null=True)), + ( + "created_at", + models.DateTimeField(default=django.utils.timezone.now, null=True), + ), ("updated_at", models.DateTimeField(auto_now=True)), ( "scope", @@ -495,7 +571,11 @@ class Migration(migrations.Migration): ), ( "creation_type", - models.CharField(choices=[("USR", "user"), ("GIT", "GitHub")], default="USR", max_length=3), + models.CharField( + choices=[("USR", "user"), ("GIT", "GitHub")], + default="USR", + max_length=3, + ), ), ("date_marker", models.DateTimeField(blank=True, null=True)), ("deleted", models.BooleanField(default=False)), @@ -523,16 +603,28 @@ class Migration(migrations.Migration): fields=[ ("id", models.BigAutoField(primary_key=True, serialize=False)), ("name", models.CharField(max_length=50)), - ("description", models.CharField(blank=True, max_length=400, null=True)), + ( + "description", + models.CharField(blank=True, max_length=400, null=True), + ), ("progress", models.PositiveSmallIntegerField(default=0)), ("status", models.PositiveSmallIntegerField(default=0)), - ("current_operation_index", models.PositiveSmallIntegerField(default=0)), + ( + "current_operation_index", + models.PositiveSmallIntegerField(default=0), + ), ("current_query_id", models.CharField(default="", max_length=100)), ("celery_task_id", models.CharField(default="", max_length=100)), ("started_at", models.DateTimeField(blank=True, null=True)), ("finished_at", models.DateTimeField(blank=True, null=True)), - ("posthog_min_version", models.CharField(blank=True, max_length=20, null=True)), - ("posthog_max_version", models.CharField(blank=True, max_length=20, null=True)), + ( + "posthog_min_version", + 
models.CharField(blank=True, max_length=20, null=True), + ), + ( + "posthog_max_version", + models.CharField(blank=True, max_length=20, null=True), + ), ("parameters", models.JSONField(default=dict)), ], ), @@ -555,7 +647,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Cohort", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), ("description", models.CharField(blank=True, max_length=1000)), ("deleted", models.BooleanField(default=False)), @@ -563,7 +663,10 @@ class Migration(migrations.Migration): ("version", models.IntegerField(blank=True, null=True)), ("pending_version", models.IntegerField(blank=True, null=True)), ("count", models.IntegerField(blank=True, null=True)), - ("created_at", models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True)), + ( + "created_at", + models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True), + ), ("is_calculating", models.BooleanField(default=False)), ("last_calculation", models.DateTimeField(blank=True, null=True)), ("errors_calculating", models.IntegerField(default=0)), @@ -581,7 +684,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Dashboard", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), ("description", models.TextField(blank=True)), ("pinned", models.BooleanField(default=False)), @@ -592,7 +703,11 @@ class Migration(migrations.Migration): ( "creation_mode", models.CharField( - choices=[("default", "Default"), ("template", "Template"), 
("duplicate", "Duplicate")], + choices=[ + ("default", "Default"), + ("template", "Template"), + ("duplicate", "Duplicate"), + ], default="default", max_length=16, ), @@ -610,7 +725,11 @@ class Migration(migrations.Migration): ( "deprecated_tags", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + null=True, + size=None, ), ), ( @@ -624,17 +743,31 @@ class Migration(migrations.Migration): size=None, ), ), - ("share_token", models.CharField(blank=True, max_length=400, null=True)), + ( + "share_token", + models.CharField(blank=True, max_length=400, null=True), + ), ("is_shared", models.BooleanField(default=False)), ], ), migrations.CreateModel( name="DashboardTile", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("layouts", models.JSONField(default=dict)), ("color", models.CharField(blank=True, max_length=400, null=True)), - ("filters_hash", models.CharField(blank=True, max_length=400, null=True)), + ( + "filters_hash", + models.CharField(blank=True, max_length=400, null=True), + ), ("last_refresh", models.DateTimeField(blank=True, null=True)), ("refreshing", models.BooleanField(null=True)), ("refresh_attempt", models.IntegerField(blank=True, null=True)), @@ -644,7 +777,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Element", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("text", models.CharField(blank=True, max_length=10000, null=True)), ("tag_name", models.CharField(blank=True, max_length=1000, null=True)), 
("href", models.CharField(blank=True, max_length=10000, null=True)), @@ -652,7 +793,10 @@ class Migration(migrations.Migration): ( "attr_class", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(blank=True, max_length=200), blank=True, null=True, size=None + base_field=models.CharField(blank=True, max_length=200), + blank=True, + null=True, + size=None, ), ), ("nth_child", models.IntegerField(blank=True, null=True)), @@ -664,20 +808,42 @@ class Migration(migrations.Migration): migrations.CreateModel( name="ElementGroup", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("hash", models.CharField(blank=True, max_length=400, null=True)), ], ), migrations.CreateModel( name="Event", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), ("event", models.CharField(blank=True, max_length=200, null=True)), ("distinct_id", models.CharField(max_length=200)), ("properties", models.JSONField(default=dict)), - ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)), - ("elements_hash", models.CharField(blank=True, max_length=200, null=True)), + ( + "timestamp", + models.DateTimeField(blank=True, default=django.utils.timezone.now), + ), + ( + "elements_hash", + models.CharField(blank=True, max_length=200, null=True), + ), ("site_url", models.CharField(blank=True, max_length=200, null=True)), ("elements", models.JSONField(blank=True, default=list, null=True)), ], @@ -685,7 +851,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="EventBuffer", fields=[ - ("id", 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("event", models.JSONField(blank=True, null=True)), ("process_at", models.DateTimeField()), ("locked", models.BooleanField()), @@ -697,11 +871,17 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=400)), - ("created_at", models.DateTimeField(default=django.utils.timezone.now, null=True)), + ( + "created_at", + models.DateTimeField(default=django.utils.timezone.now, null=True), + ), ("last_seen_at", models.DateTimeField(default=None, null=True)), ("volume_30_day", models.IntegerField(default=None, null=True)), ("query_usage_30_day", models.IntegerField(default=None, null=True)), @@ -710,7 +890,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="EventProperty", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("event", models.CharField(max_length=400)), ("property", models.CharField(max_length=400)), ], @@ -718,9 +906,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Experiment", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(max_length=400)), - ("description", models.CharField(blank=True, max_length=400, null=True)), + ( + "description", + models.CharField(blank=True, 
max_length=400, null=True), + ), ("filters", models.JSONField(default=dict)), ("parameters", models.JSONField(default=dict, null=True)), ("secondary_metrics", models.JSONField(default=list, null=True)), @@ -734,7 +933,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="ExportedAsset", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "export_format", models.CharField( @@ -749,7 +956,10 @@ class Migration(migrations.Migration): ("content", models.BinaryField(null=True)), ("created_at", models.DateTimeField(auto_now_add=True)), ("export_context", models.JSONField(blank=True, null=True)), - ("content_location", models.TextField(blank=True, max_length=1000, null=True)), + ( + "content_location", + models.TextField(blank=True, max_length=1000, null=True), + ), ( "access_token", models.CharField( @@ -764,7 +974,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlag", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=400)), ("name", models.TextField(blank=True)), ("filters", models.JSONField(default=dict)), @@ -774,13 +992,24 @@ class Migration(migrations.Migration): ("active", models.BooleanField(default=True)), ("rollback_conditions", models.JSONField(blank=True, null=True)), ("performed_rollback", models.BooleanField(blank=True, null=True)), - ("ensure_experience_continuity", models.BooleanField(blank=True, default=False, null=True)), + ( + "ensure_experience_continuity", + models.BooleanField(blank=True, default=False, null=True), + ), ], ), migrations.CreateModel( name="FeatureFlagHashKeyOverride", fields=[ - ("id", 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("feature_flag_key", models.CharField(max_length=400)), ("hash_key", models.CharField(max_length=400)), ], @@ -788,14 +1017,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlagOverride", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("override_value", models.JSONField()), ], ), migrations.CreateModel( name="Group", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("group_key", models.CharField(max_length=400)), ("group_type_index", models.IntegerField()), ("group_properties", models.JSONField(default=dict)), @@ -808,22 +1053,53 @@ class Migration(migrations.Migration): migrations.CreateModel( name="GroupTypeMapping", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("group_type", models.CharField(max_length=400)), ("group_type_index", models.IntegerField()), - ("name_singular", models.CharField(blank=True, max_length=400, null=True)), - ("name_plural", models.CharField(blank=True, max_length=400, null=True)), + ( + "name_singular", + models.CharField(blank=True, max_length=400, null=True), + ), + ( + "name_plural", + models.CharField(blank=True, max_length=400, null=True), + ), ], ), migrations.CreateModel( name="Insight", fields=[ - ("id", models.AutoField(auto_created=True, 
primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), - ("derived_name", models.CharField(blank=True, max_length=400, null=True)), - ("description", models.CharField(blank=True, max_length=400, null=True)), + ( + "derived_name", + models.CharField(blank=True, max_length=400, null=True), + ), + ( + "description", + models.CharField(blank=True, max_length=400, null=True), + ), ("filters", models.JSONField(default=dict)), - ("filters_hash", models.CharField(blank=True, max_length=400, null=True)), + ( + "filters_hash", + models.CharField(blank=True, max_length=400, null=True), + ), ("order", models.IntegerField(blank=True, null=True)), ("deleted", models.BooleanField(default=False)), ("saved", models.BooleanField(default=False)), @@ -831,10 +1107,20 @@ class Migration(migrations.Migration): ("last_refresh", models.DateTimeField(blank=True, null=True)), ("refreshing", models.BooleanField(default=False)), ("is_sample", models.BooleanField(default=False)), - ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)), + ( + "short_id", + models.CharField( + blank=True, + default=posthog.utils.generate_short_id, + max_length=12, + ), + ), ("favorited", models.BooleanField(default=False)), ("refresh_attempt", models.IntegerField(blank=True, null=True)), - ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ("layouts", models.JSONField(default=dict)), ("color", models.CharField(blank=True, max_length=400, null=True)), ("updated_at", models.DateTimeField(auto_now=True)), @@ -843,7 +1129,11 @@ class Migration(migrations.Migration): ( "deprecated_tags", django.contrib.postgres.fields.ArrayField( - 
base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + null=True, + size=None, ), ), ( @@ -868,7 +1158,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("cache_key", models.CharField(max_length=400)), @@ -883,14 +1176,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="InsightViewed", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("last_viewed_at", models.DateTimeField()), ], ), migrations.CreateModel( name="InstanceSetting", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=128)), ("raw_value", models.CharField(blank=True, max_length=1024)), ], @@ -901,26 +1210,44 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=64)), - ("slug", posthog.models.utils.LowercaseSlugField(max_length=48, unique=True)), + ( + "slug", + posthog.models.utils.LowercaseSlugField(max_length=48, unique=True), + ), ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ( "plugins_access_level", models.PositiveSmallIntegerField( - choices=[(0, "none"), (3, 
"config"), (6, "install"), (9, "root")], default=3 + choices=[ + (0, "none"), + (3, "config"), + (6, "install"), + (9, "root"), + ], + default=3, ), ), ("for_internal_metrics", models.BooleanField(default=False)), ("is_member_join_email_enabled", models.BooleanField(default=True)), - ("customer_id", models.CharField(blank=True, max_length=200, null=True)), + ( + "customer_id", + models.CharField(blank=True, max_length=200, null=True), + ), ( "available_features", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=64), blank=True, default=list, size=None + base_field=models.CharField(max_length=64), + blank=True, + default=list, + size=None, ), ), ("usage", models.JSONField(blank=True, null=True)), @@ -929,7 +1256,10 @@ class Migration(migrations.Migration): ( "domain_whitelist", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=256), blank=True, default=list, size=None + base_field=models.CharField(max_length=256), + blank=True, + default=list, + size=None, ), ), ], @@ -937,18 +1267,39 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Person", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True)), - ("properties_last_updated_at", models.JSONField(blank=True, default=dict, null=True)), + ( + "properties_last_updated_at", + models.JSONField(blank=True, default=dict, null=True), + ), ("properties_last_operation", models.JSONField(blank=True, null=True)), ("properties", models.JSONField(default=dict)), ("is_identified", models.BooleanField(default=False)), - ("uuid", models.UUIDField(db_index=True, default=posthog.models.utils.UUIDT, editable=False)), + ( + "uuid", + models.UUIDField( + db_index=True, + default=posthog.models.utils.UUIDT, + 
editable=False, + ), + ), ("version", models.BigIntegerField(blank=True, null=True)), ( "is_user", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, ), ), ], @@ -956,7 +1307,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Plugin", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "plugin_type", models.CharField( @@ -974,7 +1333,10 @@ class Migration(migrations.Migration): ), ("is_global", models.BooleanField(default=False)), ("is_preinstalled", models.BooleanField(default=False)), - ("is_stateless", models.BooleanField(blank=True, default=False, null=True)), + ( + "is_stateless", + models.BooleanField(blank=True, default=False, null=True), + ), ("name", models.CharField(blank=True, max_length=200, null=True)), ("description", models.TextField(blank=True, null=True)), ("url", models.CharField(blank=True, max_length=800, null=True)), @@ -1008,7 +1370,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PluginConfig", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("enabled", models.BooleanField(default=False)), ("order", models.IntegerField()), ("config", models.JSONField(default=dict)), @@ -1016,27 +1386,49 @@ class Migration(migrations.Migration): ("web_token", models.CharField(default=None, max_length=64, null=True)), ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), - ("plugin", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")), + ( + "plugin", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"), + ), ], ), migrations.CreateModel( name="Prompt", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("step", models.IntegerField()), ("type", models.CharField(max_length=200)), ("title", models.CharField(max_length=200)), ("text", models.CharField(max_length=1000)), ("placement", models.CharField(default="top", max_length=200)), ("buttons", models.JSONField()), - ("reference", models.CharField(default=None, max_length=200, null=True)), + ( + "reference", + models.CharField(default=None, max_length=200, null=True), + ), ("icon", models.CharField(max_length=200)), ], ), migrations.CreateModel( name="PromptSequence", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("type", models.CharField(max_length=200)), ( @@ -1050,7 +1442,10 @@ class Migration(migrations.Migration): ("status", models.CharField(max_length=200)), ("requires_opt_in", models.BooleanField(default=False)), ("autorun", models.BooleanField(default=True)), - ("must_have_completed", models.ManyToManyField(blank=True, to="posthog.PromptSequence")), + ( + "must_have_completed", + models.ManyToManyField(blank=True, to="posthog.PromptSequence"), + ), ("prompts", models.ManyToManyField(to="posthog.Prompt")), ], ), @@ -1060,7 +1455,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, 
+ editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=400)), @@ -1086,7 +1484,10 @@ class Migration(migrations.Migration): blank=True, choices=[ ("unix_timestamp", "Unix Timestamp in seconds"), - ("unix_timestamp_milliseconds", "Unix Timestamp in milliseconds"), + ( + "unix_timestamp_milliseconds", + "Unix Timestamp in milliseconds", + ), ("YYYY-MM-DDThh:mm:ssZ", "YYYY-MM-DDThh:mm:ssZ"), ("YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss"), ("DD-MM-YYYY hh:mm:ss", "DD-MM-YYYY hh:mm:ss"), @@ -1105,21 +1506,45 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingPlaylist", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "short_id", + models.CharField( + blank=True, + default=posthog.utils.generate_short_id, + max_length=12, + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), - ("derived_name", models.CharField(blank=True, max_length=400, null=True)), + ( + "derived_name", + models.CharField(blank=True, max_length=400, null=True), + ), ("description", models.TextField(blank=True)), ("pinned", models.BooleanField(default=False)), ("deleted", models.BooleanField(default=False)), ("filters", models.JSONField(default=dict)), ("created_at", models.DateTimeField(auto_now_add=True)), - ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ("is_static", models.BooleanField(default=False)), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + 
on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -1140,7 +1565,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=255)), @@ -1149,7 +1577,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Team", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "uuid", models.UUIDField(default=posthog.models.utils.UUIDT, editable=False), @@ -1161,7 +1597,8 @@ class Migration(migrations.Migration): max_length=200, validators=[ django.core.validators.MinLengthValidator( - 10, "Project's API token must be at least 10 characters long!" 
+ 10, + "Project's API token must be at least 10 characters long!", ) ], ), @@ -1169,7 +1606,10 @@ class Migration(migrations.Migration): ( "app_urls", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=200, null=True), blank=True, default=list, size=None + base_field=models.CharField(max_length=200, null=True), + blank=True, + default=list, + size=None, ), ), ( @@ -1180,21 +1620,36 @@ class Migration(migrations.Migration): validators=[django.core.validators.MinLengthValidator(1, "Project must have a name!")], ), ), - ("slack_incoming_webhook", models.CharField(blank=True, max_length=500, null=True)), + ( + "slack_incoming_webhook", + models.CharField(blank=True, max_length=500, null=True), + ), ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), ("anonymize_ips", models.BooleanField(default=False)), ("completed_snippet_onboarding", models.BooleanField(default=False)), ("ingested_event", models.BooleanField(default=False)), ("session_recording_opt_in", models.BooleanField(default=False)), - ("capture_console_log_opt_in", models.BooleanField(blank=True, null=True)), - ("signup_token", models.CharField(blank=True, max_length=200, null=True)), + ( + "capture_console_log_opt_in", + models.BooleanField(blank=True, null=True), + ), + ( + "signup_token", + models.CharField(blank=True, max_length=200, null=True), + ), ("is_demo", models.BooleanField(default=False)), ("access_control", models.BooleanField(default=False)), ("inject_web_apps", models.BooleanField(null=True)), ("test_account_filters", models.JSONField(default=list)), - ("test_account_filters_default_checked", models.BooleanField(blank=True, null=True)), - ("path_cleaning_filters", models.JSONField(blank=True, default=list, null=True)), + ( + "test_account_filters_default_checked", + models.BooleanField(blank=True, null=True), + ), + ( + "path_cleaning_filters", + models.JSONField(blank=True, default=list, null=True), + 
), ( "timezone", models.CharField( @@ -1256,16 +1711,34 @@ class Migration(migrations.Migration): ("America/Anguilla", "America/Anguilla"), ("America/Antigua", "America/Antigua"), ("America/Araguaina", "America/Araguaina"), - ("America/Argentina/Buenos_Aires", "America/Argentina/Buenos_Aires"), - ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), + ( + "America/Argentina/Catamarca", + "America/Argentina/Catamarca", + ), ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), - ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), + ( + "America/Argentina/La_Rioja", + "America/Argentina/La_Rioja", + ), ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), - ("America/Argentina/Rio_Gallegos", "America/Argentina/Rio_Gallegos"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), ("America/Argentina/Salta", "America/Argentina/Salta"), - ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), - ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), + ( + "America/Argentina/San_Juan", + "America/Argentina/San_Juan", + ), + ( + "America/Argentina/San_Luis", + "America/Argentina/San_Luis", + ), ("America/Argentina/Tucuman", "America/Argentina/Tucuman"), ("America/Argentina/Ushuaia", "America/Argentina/Ushuaia"), ("America/Aruba", "America/Aruba"), @@ -1314,10 +1787,16 @@ class Migration(migrations.Migration): ("America/Halifax", "America/Halifax"), ("America/Havana", "America/Havana"), ("America/Hermosillo", "America/Hermosillo"), - ("America/Indiana/Indianapolis", "America/Indiana/Indianapolis"), + ( + "America/Indiana/Indianapolis", + "America/Indiana/Indianapolis", + ), ("America/Indiana/Knox", "America/Indiana/Knox"), ("America/Indiana/Marengo", "America/Indiana/Marengo"), - ("America/Indiana/Petersburg", "America/Indiana/Petersburg"), + ( + 
"America/Indiana/Petersburg", + "America/Indiana/Petersburg", + ), ("America/Indiana/Tell_City", "America/Indiana/Tell_City"), ("America/Indiana/Vevay", "America/Indiana/Vevay"), ("America/Indiana/Vincennes", "America/Indiana/Vincennes"), @@ -1326,8 +1805,14 @@ class Migration(migrations.Migration): ("America/Iqaluit", "America/Iqaluit"), ("America/Jamaica", "America/Jamaica"), ("America/Juneau", "America/Juneau"), - ("America/Kentucky/Louisville", "America/Kentucky/Louisville"), - ("America/Kentucky/Monticello", "America/Kentucky/Monticello"), + ( + "America/Kentucky/Louisville", + "America/Kentucky/Louisville", + ), + ( + "America/Kentucky/Monticello", + "America/Kentucky/Monticello", + ), ("America/Kralendijk", "America/Kralendijk"), ("America/La_Paz", "America/La_Paz"), ("America/Lima", "America/Lima"), @@ -1354,9 +1839,18 @@ class Migration(migrations.Migration): ("America/Nipigon", "America/Nipigon"), ("America/Nome", "America/Nome"), ("America/Noronha", "America/Noronha"), - ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), - ("America/North_Dakota/Center", "America/North_Dakota/Center"), - ("America/North_Dakota/New_Salem", "America/North_Dakota/New_Salem"), + ( + "America/North_Dakota/Beulah", + "America/North_Dakota/Beulah", + ), + ( + "America/North_Dakota/Center", + "America/North_Dakota/Center", + ), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), ("America/Nuuk", "America/Nuuk"), ("America/Ojinaga", "America/Ojinaga"), ("America/Panama", "America/Panama"), @@ -1643,11 +2137,17 @@ class Migration(migrations.Migration): max_length=240, ), ), - ("data_attributes", models.JSONField(default=posthog.models.team.team.get_default_data_attributes)), + ( + "data_attributes", + models.JSONField(default=posthog.models.team.team.get_default_data_attributes), + ), ( "person_display_name_properties", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=400), blank=True, null=True, 
size=None + base_field=models.CharField(max_length=400), + blank=True, + null=True, + size=None, ), ), ( @@ -1659,11 +2159,20 @@ class Migration(migrations.Migration): ( "recording_domains", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=200, null=True), blank=True, null=True, size=None + base_field=models.CharField(max_length=200, null=True), + blank=True, + null=True, + size=None, ), ), - ("correlation_config", models.JSONField(blank=True, default=dict, null=True)), - ("session_recording_retention_period_days", models.IntegerField(blank=True, default=None, null=True)), + ( + "correlation_config", + models.JSONField(blank=True, default=dict, null=True), + ), + ( + "session_recording_retention_period_days", + models.IntegerField(blank=True, default=None, null=True), + ), ("plugins_opt_in", models.BooleanField(default=False)), ("opt_out_capture", models.BooleanField(default=False)), ("event_names", models.JSONField(default=list)), @@ -1720,16 +2229,36 @@ class Migration(migrations.Migration): migrations.CreateModel( name="UserPromptState", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "last_updated_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ("step", models.IntegerField(default=None, null=True)), ("completed", models.BooleanField(default=False)), ("dismissed", models.BooleanField(default=False)), ( "sequence", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.promptsequence"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.promptsequence", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), - 
("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( @@ -1738,20 +2267,35 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("created_at", models.DateTimeField(auto_now_add=True)), - ("media_location", models.TextField(blank=True, max_length=1000, null=True)), - ("content_type", models.TextField(blank=True, max_length=100, null=True)), + ( + "media_location", + models.TextField(blank=True, max_length=1000, null=True), + ), + ( + "content_type", + models.TextField(blank=True, max_length=100, null=True), + ), ("file_name", models.TextField(blank=True, max_length=1000, null=True)), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "abstract": False, @@ -1760,13 +2304,27 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Text", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("body", models.CharField(blank=True, max_length=4000, null=True)), - ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, 
on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -1779,7 +2337,10 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.CreateModel( @@ -1788,7 +2349,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -1844,7 +2408,9 @@ class Migration(migrations.Migration): ( "tag", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="tagged_items", to="posthog.tag" + on_delete=django.db.models.deletion.CASCADE, + related_name="tagged_items", + to="posthog.tag", ), ), ], @@ -1857,12 +2423,25 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Subscription", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("title", models.CharField(blank=True, max_length=100, null=True)), ( "target_type", models.CharField( - choices=[("email", "Email"), ("slack", "Slack"), ("webhook", "Webhook")], max_length=10 + choices=[ + ("email", "Email"), + ("slack", "Slack"), + ("webhook", "Webhook"), + ], + max_length=10, ), ), ("target_value", models.TextField()), @@ -1910,24 +2489,46 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + 
on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( "dashboard", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), ( "insight", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.CreateModel( name="SharingConfiguration", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True)), ("enabled", models.BooleanField(default=False)), ( @@ -1942,29 +2543,65 @@ class Migration(migrations.Migration): ), ( "dashboard", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), ( "insight", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.CreateModel( name="SessionRecordingViewed", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), ("session_id", models.CharField(max_length=200)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( name="SessionRecordingPlaylistItem", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("session_id", models.CharField(max_length=200)), ("created_at", models.DateTimeField(auto_now_add=True)), ("deleted", models.BooleanField(blank=True, null=True)), @@ -1986,14 +2623,28 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingEvent", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), - ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)), + ( + "timestamp", + models.DateTimeField(blank=True, default=django.utils.timezone.now), + ), ("distinct_id", models.CharField(max_length=200)), ("session_id", models.CharField(max_length=200)), ("window_id", models.CharField(blank=True, max_length=200, null=True)), ("snapshot_data", models.JSONField(default=dict)), - 
("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddField( @@ -2009,12 +2660,23 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PluginStorage", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("value", models.TextField(blank=True, null=True)), ( "plugin_config", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.pluginconfig", + ), ), ], ), @@ -2024,7 +2686,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("filename", models.CharField(max_length=200)), @@ -2032,7 +2697,11 @@ class Migration(migrations.Migration): ( "status", models.CharField( - choices=[("LOCKED", "locked"), ("TRANSPILED", "transpiled"), ("ERROR", "error")], + choices=[ + ("LOCKED", "locked"), + ("TRANSPILED", "transpiled"), + ("ERROR", "error"), + ], max_length=20, null=True, ), @@ -2040,18 +2709,33 @@ class Migration(migrations.Migration): ("transpiled", models.TextField(blank=True, null=True)), ("error", models.TextField(blank=True, null=True)), ("updated_at", models.DateTimeField(blank=True, null=True)), - ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")), + ( + "plugin", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"), + ), ], ), migrations.AddField( 
model_name="pluginconfig", name="team", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", + ), ), migrations.CreateModel( name="PluginAttachment", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("content_type", models.CharField(max_length=200)), ("file_name", models.CharField(max_length=200)), @@ -2060,22 +2744,46 @@ class Migration(migrations.Migration): ( "plugin_config", models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig" + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.pluginconfig", + ), + ), + ( + "team", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", ), ), - ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.CreateModel( name="PersonDistinctId", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("distinct_id", models.CharField(max_length=400)), ("version", models.BigIntegerField(blank=True, null=True)), - ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")), + ( + "person", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"), + ), ( "team", - models.ForeignKey(db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + models.ForeignKey( + db_index=False, + on_delete=django.db.models.deletion.CASCADE, 
+ to="posthog.team", + ), ), ], ), @@ -2092,8 +2800,20 @@ class Migration(migrations.Migration): ), ), ("label", models.CharField(max_length=40)), - ("value", models.CharField(blank=True, editable=False, max_length=50, null=True, unique=True)), - ("secure_value", models.CharField(editable=False, max_length=300, null=True, unique=True)), + ( + "value", + models.CharField( + blank=True, + editable=False, + max_length=50, + null=True, + unique=True, + ), + ), + ( + "secure_value", + models.CharField(editable=False, max_length=300, null=True, unique=True), + ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("last_used_at", models.DateTimeField(blank=True, null=True)), ( @@ -2127,13 +2847,17 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( "level", models.PositiveSmallIntegerField( - choices=[(1, "member"), (8, "administrator"), (15, "owner")], default=1 + choices=[(1, "member"), (8, "administrator"), (15, "owner")], + default=1, ), ), ("joined_at", models.DateTimeField(auto_now_add=True)), @@ -2164,10 +2888,16 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), - ("target_email", models.EmailField(db_index=True, max_length=254, null=True)), + ( + "target_email", + models.EmailField(db_index=True, max_length=254, null=True), + ), ("first_name", models.CharField(blank=True, default="", max_length=30)), ("emailing_attempt_made", models.BooleanField(default=False)), ("created_at", models.DateTimeField(auto_now_add=True)), @@ -2203,27 +2933,45 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - 
default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("domain", models.CharField(max_length=128, unique=True)), ( "verification_challenge", models.CharField( - default=posthog.models.organization_domain.generate_verification_challenge, max_length=128 + default=posthog.models.organization_domain.generate_verification_challenge, + max_length=128, ), ), - ("verified_at", models.DateTimeField(blank=True, default=None, null=True)), - ("last_verification_retry", models.DateTimeField(blank=True, default=None, null=True)), + ( + "verified_at", + models.DateTimeField(blank=True, default=None, null=True), + ), + ( + "last_verification_retry", + models.DateTimeField(blank=True, default=None, null=True), + ), ("jit_provisioning_enabled", models.BooleanField(default=False)), ("sso_enforcement", models.CharField(blank=True, max_length=28)), - ("saml_entity_id", models.CharField(blank=True, max_length=512, null=True)), - ("saml_acs_url", models.CharField(blank=True, max_length=512, null=True)), + ( + "saml_entity_id", + models.CharField(blank=True, max_length=512, null=True), + ), + ( + "saml_acs_url", + models.CharField(blank=True, max_length=512, null=True), + ), ("saml_x509_cert", models.TextField(blank=True, null=True)), ( "organization", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="posthog.organization" + on_delete=django.db.models.deletion.CASCADE, + related_name="domains", + to="posthog.organization", ), ), ], @@ -2247,14 +2995,19 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("last_viewed_activity_date", models.DateTimeField(default=None)), ( "user", models.ForeignKey( - null=True, 
on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ], @@ -2265,7 +3018,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("email_hash", models.CharField(max_length=1024)), @@ -2280,7 +3036,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Integration", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("kind", models.CharField(choices=[("slack", "Slack")], max_length=10)), ("config", models.JSONField(default=dict)), ("sensitive_config", models.JSONField(default=dict)), @@ -2289,10 +3053,16 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( @@ -2328,7 +3098,9 @@ class Migration(migrations.Migration): model_name="insightcachingstate", name="insight", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="caching_states", to="posthog.insight" + on_delete=django.db.models.deletion.CASCADE, + related_name="caching_states", + to="posthog.insight", ), ), migrations.AddField( @@ -2340,7 +3112,10 @@ class Migration(migrations.Migration): 
model_name="insight", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( @@ -2358,7 +3133,10 @@ class Migration(migrations.Migration): model_name="insight", name="dive_dashboard", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard" + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.dashboard", ), ), migrations.AddField( @@ -2426,18 +3204,29 @@ class Migration(migrations.Migration): model_name="exportedasset", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( model_name="exportedasset", name="dashboard", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), migrations.AddField( model_name="exportedasset", name="insight", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), ), migrations.AddField( model_name="exportedasset", @@ -2488,21 +3277,29 @@ class Migration(migrations.Migration): model_name="element", name="event", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.event" + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.event", ), ), migrations.AddField( model_name="element", 
name="group", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.elementgroup" + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.elementgroup", ), ), migrations.AddField( model_name="dashboardtile", name="dashboard", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="tiles", to="posthog.dashboard" + on_delete=django.db.models.deletion.CASCADE, + related_name="tiles", + to="posthog.dashboard", ), ), migrations.AddField( @@ -2529,14 +3326,20 @@ class Migration(migrations.Migration): model_name="dashboard", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( model_name="dashboard", name="insights", field=models.ManyToManyField( - blank=True, related_name="dashboards", through="posthog.DashboardTile", to="posthog.Insight" + blank=True, + related_name="dashboards", + through="posthog.DashboardTile", + to="posthog.Insight", ), ), migrations.AddField( @@ -2558,7 +3361,10 @@ class Migration(migrations.Migration): model_name="cohort", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( @@ -2580,27 +3386,39 @@ class Migration(migrations.Migration): model_name="asyncdeletion", name="created_by", field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( model_name="annotation", name="created_by", field=models.ForeignKey( - blank=True, 
null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( model_name="annotation", name="dashboard_item", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.insight" + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.insight", ), ), migrations.AddField( model_name="annotation", name="organization", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.organization"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.organization", + ), ), migrations.AddField( model_name="annotation", @@ -2611,21 +3429,28 @@ class Migration(migrations.Migration): model_name="activitylog", name="user", field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( model_name="actionstep", name="action", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="steps", to="posthog.action" + on_delete=django.db.models.deletion.CASCADE, + related_name="steps", + to="posthog.action", ), ), migrations.AddField( model_name="action", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( @@ -2772,7 +3597,16 @@ class Migration(migrations.Migration): ), migrations.AlterUniqueTogether( name="taggeditem", - unique_together={("tag", "dashboard", "insight", "event_definition", "property_definition", "action")}, + unique_together={ + ( + "tag", + 
"dashboard", + "insight", + "event_definition", + "property_definition", + "action", + ) + }, ), migrations.AlterUniqueTogether( name="tag", @@ -2780,7 +3614,10 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="sessionrecordingviewed", - index=models.Index(fields=["team_id", "user_id", "session_id"], name="posthog_ses_team_id_465af1_idx"), + index=models.Index( + fields=["team_id", "user_id", "session_id"], + name="posthog_ses_team_id_465af1_idx", + ), ), migrations.AlterUniqueTogether( name="sessionrecordingviewed", @@ -2801,14 +3638,17 @@ class Migration(migrations.Migration): migrations.AddIndex( model_name="sessionrecordingevent", index=models.Index( - fields=["team_id", "distinct_id", "timestamp", "session_id"], name="posthog_ses_team_id_46392f_idx" + fields=["team_id", "distinct_id", "timestamp", "session_id"], + name="posthog_ses_team_id_46392f_idx", ), ), TrigramExtension(), migrations.AddIndex( model_name="propertydefinition", index=django.contrib.postgres.indexes.GinIndex( - fields=["name"], name="index_property_definition_name", opclasses=["gin_trgm_ops"] + fields=["name"], + name="index_property_definition_name", + opclasses=["gin_trgm_ops"], ), ), migrations.AddConstraint( @@ -2829,7 +3669,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="pluginstorage", constraint=models.UniqueConstraint( - fields=("plugin_config_id", "key"), name="posthog_unique_plugin_storage_key" + fields=("plugin_config_id", "key"), + name="posthog_unique_plugin_storage_key", ), ), migrations.AddConstraint( @@ -2851,13 +3692,16 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="organizationmembership", constraint=models.UniqueConstraint( - fields=("organization_id", "user_id"), name="unique_organization_membership" + fields=("organization_id", "user_id"), + name="unique_organization_membership", ), ), migrations.AddConstraint( model_name="organizationmembership", 
constraint=models.UniqueConstraint( - condition=models.Q(("level", 15)), fields=("organization_id",), name="only_one_owner_per_organization" + condition=models.Q(("level", 15)), + fields=("organization_id",), + name="only_one_owner_per_organization", ), ), migrations.AddConstraint( @@ -2874,7 +3718,10 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="insightviewed", - index=models.Index(fields=["team_id", "user_id", "-last_viewed_at"], name="posthog_ins_team_id_339ee0_idx"), + index=models.Index( + fields=["team_id", "user_id", "-last_viewed_at"], + name="posthog_ins_team_id_339ee0_idx", + ), ), migrations.AddConstraint( model_name="insightviewed", @@ -2903,13 +3750,15 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="grouptypemapping", constraint=models.UniqueConstraint( - fields=("team", "group_type_index"), name="unique event column indexes for team" + fields=("team", "group_type_index"), + name="unique event column indexes for team", ), ), migrations.AddConstraint( model_name="grouptypemapping", constraint=models.CheckConstraint( - check=models.Q(("group_type_index__lte", 5)), name="group_type_index is less than or equal 5" + check=models.Q(("group_type_index__lte", 5)), + name="group_type_index is less than or equal 5", ), ), migrations.AddConstraint( @@ -2922,13 +3771,15 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="featureflagoverride", constraint=models.UniqueConstraint( - fields=("user", "feature_flag", "team"), name="unique feature flag for a user/team combo" + fields=("user", "feature_flag", "team"), + name="unique feature flag for a user/team combo", ), ), migrations.AddConstraint( model_name="featureflaghashkeyoverride", constraint=models.UniqueConstraint( - fields=("team", "person", "feature_flag_key"), name="Unique hash_key for a user/team/feature_flag combo" + fields=("team", "person", "feature_flag_key"), + name="Unique hash_key for a user/team/feature_flag 
combo", ), ), migrations.AddConstraint( @@ -2946,13 +3797,16 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="eventproperty", constraint=models.UniqueConstraint( - fields=("team", "event", "property"), name="posthog_event_property_unique_team_event_property" + fields=("team", "event", "property"), + name="posthog_event_property_unique_team_event_property", ), ), migrations.AddIndex( model_name="eventdefinition", index=django.contrib.postgres.indexes.GinIndex( - fields=["name"], name="index_event_definition_name", opclasses=["gin_trgm_ops"] + fields=["name"], + name="index_event_definition_name", + opclasses=["gin_trgm_ops"], ), ), migrations.AlterUniqueTogether( @@ -2965,7 +3819,10 @@ class Migration(migrations.Migration): ), migrations.AddIndex( model_name="event", - index=models.Index(fields=["timestamp", "team_id", "event"], name="posthog_eve_timesta_1f6a8c_idx"), + index=models.Index( + fields=["timestamp", "team_id", "event"], + name="posthog_eve_timesta_1f6a8c_idx", + ), ), migrations.AddConstraint( model_name="elementgroup", @@ -2986,7 +3843,9 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="dashboardtile", constraint=models.UniqueConstraint( - condition=models.Q(("text__isnull", False)), fields=("dashboard", "text"), name="unique_dashboard_text" + condition=models.Q(("text__isnull", False)), + fields=("dashboard", "text"), + name="unique_dashboard_text", ), ), migrations.AddConstraint( @@ -3019,17 +3878,25 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="asyncdeletion", constraint=models.UniqueConstraint( - fields=("deletion_type", "key", "group_type_index"), name="unique deletion for groups" + fields=("deletion_type", "key", "group_type_index"), + name="unique deletion for groups", ), ), migrations.AddIndex( model_name="activitylog", - index=models.Index(fields=["team_id", "scope", "item_id"], name="posthog_act_team_id_13a0a8_idx"), + index=models.Index( + 
fields=["team_id", "scope", "item_id"], + name="posthog_act_team_id_13a0a8_idx", + ), ), migrations.AddConstraint( model_name="activitylog", constraint=models.CheckConstraint( - check=models.Q(("team_id__isnull", False), ("organization_id__isnull", False), _connector="OR"), + check=models.Q( + ("team_id__isnull", False), + ("organization_id__isnull", False), + _connector="OR", + ), name="must_have_team_or_organization_id", ), ), @@ -3060,7 +3927,9 @@ class Migration(migrations.Migration): model_name="persondistinctid", name="team", field=models.ForeignKey( - db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team" + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", ), ), ], diff --git a/posthog/migrations/0002_person.py b/posthog/migrations/0002_person.py index b7c46ca5c2675..00676ae9e5077 100644 --- a/posthog/migrations/0002_person.py +++ b/posthog/migrations/0002_person.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0001_initial"), ] diff --git a/posthog/migrations/0003_person_is_user.py b/posthog/migrations/0003_person_is_user.py index d894d52ddf69e..e9ebff11822ba 100644 --- a/posthog/migrations/0003_person_is_user.py +++ b/posthog/migrations/0003_person_is_user.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0002_person"), ] diff --git a/posthog/migrations/0004_auto_20200125_0415.py b/posthog/migrations/0004_auto_20200125_0415.py index 2c182399e54d7..7a504bd6a4261 100644 --- a/posthog/migrations/0004_auto_20200125_0415.py +++ b/posthog/migrations/0004_auto_20200125_0415.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0003_person_is_user"), ] diff --git a/posthog/migrations/0005_remove_person_distinct_ids.py b/posthog/migrations/0005_remove_person_distinct_ids.py index 8355585fac3e4..e1b6f7a96f75a 100644 --- a/posthog/migrations/0005_remove_person_distinct_ids.py +++ 
b/posthog/migrations/0005_remove_person_distinct_ids.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0004_auto_20200125_0415"), ] diff --git a/posthog/migrations/0006_person_distinct_ids.py b/posthog/migrations/0006_person_distinct_ids.py index 4c13b697fa97b..c193d33a9916e 100644 --- a/posthog/migrations/0006_person_distinct_ids.py +++ b/posthog/migrations/0006_person_distinct_ids.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0005_remove_person_distinct_ids"), ] diff --git a/posthog/migrations/0007_element.py b/posthog/migrations/0007_element.py index ca419df97b1cc..e190ac2f5bec5 100644 --- a/posthog/migrations/0007_element.py +++ b/posthog/migrations/0007_element.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0006_person_distinct_ids"), ] diff --git a/posthog/migrations/0008_action_actionstep.py b/posthog/migrations/0008_action_actionstep.py index 4720db487a565..1d17b8f064470 100644 --- a/posthog/migrations/0008_action_actionstep.py +++ b/posthog/migrations/0008_action_actionstep.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0007_element"), ] diff --git a/posthog/migrations/0009_auto_20200127_0018.py b/posthog/migrations/0009_auto_20200127_0018.py index 3319c69a0cb13..8828294ee9399 100644 --- a/posthog/migrations/0009_auto_20200127_0018.py +++ b/posthog/migrations/0009_auto_20200127_0018.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0008_action_actionstep"), ] diff --git a/posthog/migrations/0010_funnel_funnelstep.py b/posthog/migrations/0010_funnel_funnelstep.py index 2d7d45f7ed572..3bcafa4035942 100644 --- a/posthog/migrations/0010_funnel_funnelstep.py +++ b/posthog/migrations/0010_funnel_funnelstep.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0009_auto_20200127_0018"), ] diff --git 
a/posthog/migrations/0011_auto_20200127_2105.py b/posthog/migrations/0011_auto_20200127_2105.py index b52911c657af8..c9f83a19c57b4 100644 --- a/posthog/migrations/0011_auto_20200127_2105.py +++ b/posthog/migrations/0011_auto_20200127_2105.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0010_funnel_funnelstep"), ] diff --git a/posthog/migrations/0012_team_app_url.py b/posthog/migrations/0012_team_app_url.py index dd087ab0d35fe..c6916c0818f36 100644 --- a/posthog/migrations/0012_team_app_url.py +++ b/posthog/migrations/0012_team_app_url.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0011_auto_20200127_2105"), ] diff --git a/posthog/migrations/0013_element_attr_class.py b/posthog/migrations/0013_element_attr_class.py index 44d25b77c683b..b5ec08787553e 100644 --- a/posthog/migrations/0013_element_attr_class.py +++ b/posthog/migrations/0013_element_attr_class.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0012_team_app_url"), ] diff --git a/posthog/migrations/0014_auto_20200129_0703.py b/posthog/migrations/0014_auto_20200129_0703.py index d936899632ca3..4be8151c42872 100644 --- a/posthog/migrations/0014_auto_20200129_0703.py +++ b/posthog/migrations/0014_auto_20200129_0703.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0013_element_attr_class"), ] diff --git a/posthog/migrations/0015_actionstep_event.py b/posthog/migrations/0015_actionstep_event.py index d2eef953a2311..7b2927a2813b2 100644 --- a/posthog/migrations/0015_actionstep_event.py +++ b/posthog/migrations/0015_actionstep_event.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0014_auto_20200129_0703"), ] diff --git a/posthog/migrations/0016_user_temporary_token.py b/posthog/migrations/0016_user_temporary_token.py index 926e5aaef0bbd..d2bcda6de4e23 100644 --- 
a/posthog/migrations/0016_user_temporary_token.py +++ b/posthog/migrations/0016_user_temporary_token.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0015_actionstep_event"), ] diff --git a/posthog/migrations/0017_dashboarditem.py b/posthog/migrations/0017_dashboarditem.py index b6ee906d4c455..51eb088ba2840 100644 --- a/posthog/migrations/0017_dashboarditem.py +++ b/posthog/migrations/0017_dashboarditem.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0016_user_temporary_token"), ] diff --git a/posthog/migrations/0018_funnel_deleted.py b/posthog/migrations/0018_funnel_deleted.py index 6560947c7d27b..43f596fcf7376 100644 --- a/posthog/migrations/0018_funnel_deleted.py +++ b/posthog/migrations/0018_funnel_deleted.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0017_dashboarditem"), ] diff --git a/posthog/migrations/0019_team_name.py b/posthog/migrations/0019_team_name.py index b3328cc23fb81..5b73755da2f73 100644 --- a/posthog/migrations/0019_team_name.py +++ b/posthog/migrations/0019_team_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0018_funnel_deleted"), ] diff --git a/posthog/migrations/0020_auto_20200210_0212.py b/posthog/migrations/0020_auto_20200210_0212.py index c5737413424f1..f9278fecde9a9 100644 --- a/posthog/migrations/0020_auto_20200210_0212.py +++ b/posthog/migrations/0020_auto_20200210_0212.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0019_team_name"), ] diff --git a/posthog/migrations/0021_user_distinct_id.py b/posthog/migrations/0021_user_distinct_id.py index 062702ddbdc03..95881bcc9225d 100644 --- a/posthog/migrations/0021_user_distinct_id.py +++ b/posthog/migrations/0021_user_distinct_id.py @@ -18,7 +18,6 @@ def reverse_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0020_auto_20200210_0212"), ] diff --git a/posthog/migrations/0022_action_deleted.py b/posthog/migrations/0022_action_deleted.py index a9878f6b01754..d9340dc0be1ea 100644 --- a/posthog/migrations/0022_action_deleted.py +++ b/posthog/migrations/0022_action_deleted.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0021_user_distinct_id"), ] diff --git a/posthog/migrations/0023_team_opt_out_capture.py b/posthog/migrations/0023_team_opt_out_capture.py index ecd04b3b91dda..25dab1b607a4c 100644 --- a/posthog/migrations/0023_team_opt_out_capture.py +++ b/posthog/migrations/0023_team_opt_out_capture.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0022_action_deleted"), ] diff --git a/posthog/migrations/0025_cohort.py b/posthog/migrations/0025_cohort.py index 06f93085d95a9..e330d91cb45ce 100644 --- a/posthog/migrations/0025_cohort.py +++ b/posthog/migrations/0025_cohort.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0024_add_event_distinct_id_index"), ] diff --git a/posthog/migrations/0027_move_elements_to_group.py b/posthog/migrations/0027_move_elements_to_group.py index a52d14506c71a..51a65b1f5da39 100644 --- a/posthog/migrations/0027_move_elements_to_group.py +++ b/posthog/migrations/0027_move_elements_to_group.py @@ -61,5 +61,10 @@ class Migration(migrations.Migration): ] operations = [ - migrations.RunPython(forwards, reverse_code=backwards, hints={"target_db": "default"}, elidable=True), + migrations.RunPython( + forwards, + reverse_code=backwards, + hints={"target_db": "default"}, + elidable=True, + ), ] diff --git a/posthog/migrations/0028_actionstep_url_matching.py b/posthog/migrations/0028_actionstep_url_matching.py index a127710db6dbf..4100495be04e0 100644 --- a/posthog/migrations/0028_actionstep_url_matching.py +++ b/posthog/migrations/0028_actionstep_url_matching.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ 
("posthog", "0027_move_elements_to_group"), ] diff --git a/posthog/migrations/0029_migrate_dashboard_actions.py b/posthog/migrations/0029_migrate_dashboard_actions.py index 89e8eea0ecbc9..7ddce5a0bce9d 100644 --- a/posthog/migrations/0029_migrate_dashboard_actions.py +++ b/posthog/migrations/0029_migrate_dashboard_actions.py @@ -18,7 +18,6 @@ def migrate_to_array(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0028_actionstep_url_matching"), ] diff --git a/posthog/migrations/0030_migrate_dashboard_days.py b/posthog/migrations/0030_migrate_dashboard_days.py index 4edf8c1a38633..a3516a251ef6e 100644 --- a/posthog/migrations/0030_migrate_dashboard_days.py +++ b/posthog/migrations/0030_migrate_dashboard_days.py @@ -18,7 +18,6 @@ def migrate_to_array(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0029_migrate_dashboard_actions"), ] diff --git a/posthog/migrations/0031_team_signup_token.py b/posthog/migrations/0031_team_signup_token.py index 63b6e208d5322..3e8a66ff19931 100644 --- a/posthog/migrations/0031_team_signup_token.py +++ b/posthog/migrations/0031_team_signup_token.py @@ -17,7 +17,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0030_migrate_dashboard_days"), ] diff --git a/posthog/migrations/0032_team_multiple_app_urls.py b/posthog/migrations/0032_team_multiple_app_urls.py index 7efbf6b31218c..13173c75b6376 100644 --- a/posthog/migrations/0032_team_multiple_app_urls.py +++ b/posthog/migrations/0032_team_multiple_app_urls.py @@ -5,7 +5,6 @@ def migrate_to_array(apps, schema_editor): - Team = apps.get_model("posthog", "Team") for mm in Team.objects.all(): @@ -14,7 +13,6 @@ def migrate_to_array(apps, schema_editor): def rollback_to_string(apps, schema_editor): - Team = apps.get_model("posthog", "Team") for mm in Team.objects.all(): @@ -23,7 +21,6 @@ def rollback_to_string(apps, schema_editor): class 
Migration(migrations.Migration): - dependencies = [ ("posthog", "0031_team_signup_token"), ] diff --git a/posthog/migrations/0033_auto_20200316_1655.py b/posthog/migrations/0033_auto_20200316_1655.py index b51694a2941b3..83fa47260ad7a 100644 --- a/posthog/migrations/0033_auto_20200316_1655.py +++ b/posthog/migrations/0033_auto_20200316_1655.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0032_team_multiple_app_urls"), ] diff --git a/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py b/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py index 4fa956d1b61be..a48ff23a7ffc5 100644 --- a/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py +++ b/posthog/migrations/0034_pg_trgm_and_btree_20200318_1447.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0033_auto_20200316_1655"), ] diff --git a/posthog/migrations/0036_remove_current_url_index.py b/posthog/migrations/0036_remove_current_url_index.py index a705491e1bb18..00d8ed6871125 100644 --- a/posthog/migrations/0036_remove_current_url_index.py +++ b/posthog/migrations/0036_remove_current_url_index.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0035_current_url_index_20200318_1459"), ] diff --git a/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py b/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py index 66be9b4b9bc3a..b69d237cf84ec 100644 --- a/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py +++ b/posthog/migrations/0037_action_step_url_matching_can_be_null_20200402_1351.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0036_remove_current_url_index"), ] diff --git a/posthog/migrations/0038_migrate_actions_to_precalculate_events.py b/posthog/migrations/0038_migrate_actions_to_precalculate_events.py index 172ce9d3e81ef..3cbb4c8e06082 100644 --- 
a/posthog/migrations/0038_migrate_actions_to_precalculate_events.py +++ b/posthog/migrations/0038_migrate_actions_to_precalculate_events.py @@ -12,7 +12,6 @@ def rollback(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0037_action_step_url_matching_can_be_null_20200402_1351"), ] diff --git a/posthog/migrations/0039_populate_event_ip_property.py b/posthog/migrations/0039_populate_event_ip_property.py index 9fb98ba2a7f30..d1e4166a1982e 100644 --- a/posthog/migrations/0039_populate_event_ip_property.py +++ b/posthog/migrations/0039_populate_event_ip_property.py @@ -2,7 +2,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0038_migrate_actions_to_precalculate_events"), ] diff --git a/posthog/migrations/0039_user_email_opt_in.py b/posthog/migrations/0039_user_email_opt_in.py index f5132cde4b4f6..3f8a25572715b 100644 --- a/posthog/migrations/0039_user_email_opt_in.py +++ b/posthog/migrations/0039_user_email_opt_in.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0038_migrate_actions_to_precalculate_events"), ] diff --git a/posthog/migrations/0040_remove_event_ip.py b/posthog/migrations/0040_remove_event_ip.py index 741cf5232a3df..944ae3c63c847 100644 --- a/posthog/migrations/0040_remove_event_ip.py +++ b/posthog/migrations/0040_remove_event_ip.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0039_populate_event_ip_property"), ] diff --git a/posthog/migrations/0041_merge_20200407_1805.py b/posthog/migrations/0041_merge_20200407_1805.py index aa209ddaadd9a..a73e12aa07488 100644 --- a/posthog/migrations/0041_merge_20200407_1805.py +++ b/posthog/migrations/0041_merge_20200407_1805.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0040_remove_event_ip"), ("posthog", "0039_user_email_opt_in"), diff --git a/posthog/migrations/0042_add_type_dashboarditems.py 
b/posthog/migrations/0042_add_type_dashboarditems.py index 37728e73ebb87..33a721640d5e8 100644 --- a/posthog/migrations/0042_add_type_dashboarditems.py +++ b/posthog/migrations/0042_add_type_dashboarditems.py @@ -33,7 +33,6 @@ def reverse_filter_types(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0041_merge_20200407_1805"), ] diff --git a/posthog/migrations/0043_slack_webhooks.py b/posthog/migrations/0043_slack_webhooks.py index bc68ade92b1e5..097da3742916d 100644 --- a/posthog/migrations/0043_slack_webhooks.py +++ b/posthog/migrations/0043_slack_webhooks.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0042_add_type_dashboarditems"), ] diff --git a/posthog/migrations/0044_auto_20200413_1936.py b/posthog/migrations/0044_auto_20200413_1936.py index eda91a6b76ae0..fdda5eaf560ec 100644 --- a/posthog/migrations/0044_auto_20200413_1936.py +++ b/posthog/migrations/0044_auto_20200413_1936.py @@ -33,7 +33,6 @@ def revert_funnel_steps(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0043_slack_webhooks"), ] diff --git a/posthog/migrations/0045_add_timestamp_index.py b/posthog/migrations/0045_add_timestamp_index.py index 497a8dca04f7d..b6598fe802492 100644 --- a/posthog/migrations/0045_add_timestamp_index.py +++ b/posthog/migrations/0045_add_timestamp_index.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0044_auto_20200413_1936"), ] diff --git a/posthog/migrations/0046_event_names_properties_to_team.py b/posthog/migrations/0046_event_names_properties_to_team.py index a90b2c2e1c526..7350a5a8de701 100644 --- a/posthog/migrations/0046_event_names_properties_to_team.py +++ b/posthog/migrations/0046_event_names_properties_to_team.py @@ -30,7 +30,6 @@ def noop(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0045_add_timestamp_index"), ] diff --git 
a/posthog/migrations/0047_auto_20200416_1631.py b/posthog/migrations/0047_auto_20200416_1631.py index 2e2c359df1d3b..fd236cf551358 100644 --- a/posthog/migrations/0047_auto_20200416_1631.py +++ b/posthog/migrations/0047_auto_20200416_1631.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0046_event_names_properties_to_team"), ] diff --git a/posthog/migrations/0048_auto_20200420_1051.py b/posthog/migrations/0048_auto_20200420_1051.py index e29ed78cc695f..29248b69fb693 100644 --- a/posthog/migrations/0048_auto_20200420_1051.py +++ b/posthog/migrations/0048_auto_20200420_1051.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0047_auto_20200416_1631"), ] diff --git a/posthog/migrations/0049_delete_funnelstep.py b/posthog/migrations/0049_delete_funnelstep.py index a29722f0e313a..e66988922c931 100644 --- a/posthog/migrations/0049_delete_funnelstep.py +++ b/posthog/migrations/0049_delete_funnelstep.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0048_auto_20200420_1051"), ] diff --git a/posthog/migrations/0050_dashboards.py b/posthog/migrations/0050_dashboards.py index 8d74bcf7394e7..70d1559591be7 100644 --- a/posthog/migrations/0050_dashboards.py +++ b/posthog/migrations/0050_dashboards.py @@ -26,7 +26,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0049_delete_funnelstep"), ] @@ -73,7 +72,12 @@ class Migration(migrations.Migration): to="posthog.Dashboard", ), ), - migrations.RunPython(forwards, reverse_code=backwards, hints={"target_db": "default"}, elidable=True), + migrations.RunPython( + forwards, + reverse_code=backwards, + hints={"target_db": "default"}, + elidable=True, + ), migrations.AlterField( model_name="dashboarditem", name="dashboard", diff --git a/posthog/migrations/0051_precalculate_cohorts.py b/posthog/migrations/0051_precalculate_cohorts.py index 
4e4f2224c6927..0d2e2f83981b1 100644 --- a/posthog/migrations/0051_precalculate_cohorts.py +++ b/posthog/migrations/0051_precalculate_cohorts.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0050_dashboards"), ] diff --git a/posthog/migrations/0052_data_precalculate_cohorts.py b/posthog/migrations/0052_data_precalculate_cohorts.py index b6e01cd1e2a42..d2c7990399a2b 100644 --- a/posthog/migrations/0052_data_precalculate_cohorts.py +++ b/posthog/migrations/0052_data_precalculate_cohorts.py @@ -15,7 +15,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0051_precalculate_cohorts"), ] diff --git a/posthog/migrations/0053_dashboard_item_layouts.py b/posthog/migrations/0053_dashboard_item_layouts.py index 77fcfa4df7275..f514a48fc5352 100644 --- a/posthog/migrations/0053_dashboard_item_layouts.py +++ b/posthog/migrations/0053_dashboard_item_layouts.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0052_data_precalculate_cohorts"), ] diff --git a/posthog/migrations/0054_dashboard_item_color.py b/posthog/migrations/0054_dashboard_item_color.py index 052811a71b0c0..1ba803e0baa84 100644 --- a/posthog/migrations/0054_dashboard_item_color.py +++ b/posthog/migrations/0054_dashboard_item_color.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0053_dashboard_item_layouts"), ] diff --git a/posthog/migrations/0055_user_anonymize_data.py b/posthog/migrations/0055_user_anonymize_data.py index cea3694da7149..412e2f08666b9 100644 --- a/posthog/migrations/0055_user_anonymize_data.py +++ b/posthog/migrations/0055_user_anonymize_data.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0054_dashboard_item_color"), ] diff --git a/posthog/migrations/0056_auto_20200522_1024.py b/posthog/migrations/0056_auto_20200522_1024.py index 861d269961096..ae09c0519a391 100644 --- 
a/posthog/migrations/0056_auto_20200522_1024.py +++ b/posthog/migrations/0056_auto_20200522_1024.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0055_user_anonymize_data"), ] diff --git a/posthog/migrations/0057_action_updated_at.py b/posthog/migrations/0057_action_updated_at.py index 2cdf65ea2b683..1acfa631c6425 100644 --- a/posthog/migrations/0057_action_updated_at.py +++ b/posthog/migrations/0057_action_updated_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0056_auto_20200522_1024"), ] diff --git a/posthog/migrations/0058_dashboarditem_last_refresh.py b/posthog/migrations/0058_dashboarditem_last_refresh.py index ad71e982e45ca..428995375c6e4 100644 --- a/posthog/migrations/0058_dashboarditem_last_refresh.py +++ b/posthog/migrations/0058_dashboarditem_last_refresh.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0057_action_updated_at"), ] diff --git a/posthog/migrations/0059_dashboarditem_refreshing.py b/posthog/migrations/0059_dashboarditem_refreshing.py index f5e82621b32a0..1a873e17817b2 100644 --- a/posthog/migrations/0059_dashboarditem_refreshing.py +++ b/posthog/migrations/0059_dashboarditem_refreshing.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0058_dashboarditem_last_refresh"), ] diff --git a/posthog/migrations/0060_auto_20200616_0746.py b/posthog/migrations/0060_auto_20200616_0746.py index 3c6acbdc31a2a..cfea859ea3f47 100644 --- a/posthog/migrations/0060_auto_20200616_0746.py +++ b/posthog/migrations/0060_auto_20200616_0746.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0059_dashboarditem_refreshing"), ] diff --git a/posthog/migrations/0061_featureflag.py b/posthog/migrations/0061_featureflag.py index ee2456e8c172c..d7a05ea799558 100644 --- a/posthog/migrations/0061_featureflag.py +++ b/posthog/migrations/0061_featureflag.py @@ -8,7 +8,6 @@ 
class Migration(migrations.Migration): - dependencies = [ ("posthog", "0060_auto_20200616_0746"), ] diff --git a/posthog/migrations/0062_team_anonymize_ips.py b/posthog/migrations/0062_team_anonymize_ips.py index cdd7fd305169f..d5234b8b47310 100644 --- a/posthog/migrations/0062_team_anonymize_ips.py +++ b/posthog/migrations/0062_team_anonymize_ips.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0061_featureflag"), ] diff --git a/posthog/migrations/0063_team_completed_snippet_onboarding.py b/posthog/migrations/0063_team_completed_snippet_onboarding.py index d9341aead8321..1a3819af10dae 100644 --- a/posthog/migrations/0063_team_completed_snippet_onboarding.py +++ b/posthog/migrations/0063_team_completed_snippet_onboarding.py @@ -17,7 +17,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0062_team_anonymize_ips"), ] diff --git a/posthog/migrations/0064_toolbar_mode.py b/posthog/migrations/0064_toolbar_mode.py index e8031d315fc79..566ad520e284e 100644 --- a/posthog/migrations/0064_toolbar_mode.py +++ b/posthog/migrations/0064_toolbar_mode.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0063_team_completed_snippet_onboarding"), ] diff --git a/posthog/migrations/0065_auto_20200624_1842.py b/posthog/migrations/0065_auto_20200624_1842.py index c44d120ca2517..6941eb7672045 100644 --- a/posthog/migrations/0065_auto_20200624_1842.py +++ b/posthog/migrations/0065_auto_20200624_1842.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0064_toolbar_mode"), ] diff --git a/posthog/migrations/0066_team_created_at.py b/posthog/migrations/0066_team_created_at.py index e2b76d8be195c..0e9b0f2f0c4bd 100644 --- a/posthog/migrations/0066_team_created_at.py +++ b/posthog/migrations/0066_team_created_at.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0065_auto_20200624_1842"), ] diff --git a/posthog/migrations/0067_team_updated_at.py b/posthog/migrations/0067_team_updated_at.py index 31dfbb25225ed..5545097a96e05 100644 --- a/posthog/migrations/0067_team_updated_at.py +++ b/posthog/migrations/0067_team_updated_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0066_team_created_at"), ] diff --git a/posthog/migrations/0068_auto_20200629_1322.py b/posthog/migrations/0068_auto_20200629_1322.py index 216a5c3f1c499..fc6c23eb79aa5 100644 --- a/posthog/migrations/0068_auto_20200629_1322.py +++ b/posthog/migrations/0068_auto_20200629_1322.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0067_team_updated_at"), ] diff --git a/posthog/migrations/0069_auto_20200714_1642.py b/posthog/migrations/0069_auto_20200714_1642.py index ff7cb3d92860a..a9a8091b0d32e 100644 --- a/posthog/migrations/0069_auto_20200714_1642.py +++ b/posthog/migrations/0069_auto_20200714_1642.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0068_auto_20200629_1322"), ] diff --git a/posthog/migrations/0070_team_event_properties_numerical.py b/posthog/migrations/0070_team_event_properties_numerical.py index 3b0d300c5785a..032e9a336179f 100644 --- a/posthog/migrations/0070_team_event_properties_numerical.py +++ b/posthog/migrations/0070_team_event_properties_numerical.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0069_auto_20200714_1642"), ] diff --git a/posthog/migrations/0071_cache_dashboard_items.py b/posthog/migrations/0071_cache_dashboard_items.py index 909daf452e801..9b9d1b2b72a78 100644 --- a/posthog/migrations/0071_cache_dashboard_items.py +++ b/posthog/migrations/0071_cache_dashboard_items.py @@ -30,7 +30,6 @@ def reverse_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0070_team_event_properties_numerical"), ] diff --git 
a/posthog/migrations/0072_action_step_url_matching_regex.py b/posthog/migrations/0072_action_step_url_matching_regex.py index e7a624c3e06fb..2afb583ce5097 100644 --- a/posthog/migrations/0072_action_step_url_matching_regex.py +++ b/posthog/migrations/0072_action_step_url_matching_regex.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0071_cache_dashboard_items"), ] diff --git a/posthog/migrations/0073_update_dashboard_item_filters.py b/posthog/migrations/0073_update_dashboard_item_filters.py index 3f204ce499a30..d4310df88ef1c 100644 --- a/posthog/migrations/0073_update_dashboard_item_filters.py +++ b/posthog/migrations/0073_update_dashboard_item_filters.py @@ -20,7 +20,6 @@ def reverse_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0072_action_step_url_matching_regex"), ] diff --git a/posthog/migrations/0074_toolbar_default_on.py b/posthog/migrations/0074_toolbar_default_on.py index 2804bc5edb060..9fcc672bb598f 100644 --- a/posthog/migrations/0074_toolbar_default_on.py +++ b/posthog/migrations/0074_toolbar_default_on.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0073_update_dashboard_item_filters"), ] diff --git a/posthog/migrations/0075_action_slack_message_format.py b/posthog/migrations/0075_action_slack_message_format.py index 7404c7965efd9..9eb7d8c7cda26 100644 --- a/posthog/migrations/0075_action_slack_message_format.py +++ b/posthog/migrations/0075_action_slack_message_format.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0074_toolbar_default_on"), ] diff --git a/posthog/migrations/0076_auto_20200819_1214.py b/posthog/migrations/0076_auto_20200819_1214.py index d450022fc29bc..a02021d092451 100644 --- a/posthog/migrations/0076_auto_20200819_1214.py +++ b/posthog/migrations/0076_auto_20200819_1214.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0075_action_slack_message_format"), ] diff --git a/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py b/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py index 4c80a3f2f2469..ae77683b0ce12 100644 --- a/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py +++ b/posthog/migrations/0077_cohortpeople_id_to_bigautofield.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0076_auto_20200819_1214"), ] diff --git a/posthog/migrations/0078_auto_20200731_1323.py b/posthog/migrations/0078_auto_20200731_1323.py index 0d171ef26858e..0a67bdd4874ad 100644 --- a/posthog/migrations/0078_auto_20200731_1323.py +++ b/posthog/migrations/0078_auto_20200731_1323.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0077_cohortpeople_id_to_bigautofield"), ] diff --git a/posthog/migrations/0079_move_funnels_to_insights.py b/posthog/migrations/0079_move_funnels_to_insights.py index 9a381f2af3b26..d4466b0cccb5f 100644 --- a/posthog/migrations/0079_move_funnels_to_insights.py +++ b/posthog/migrations/0079_move_funnels_to_insights.py @@ -39,7 +39,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0078_auto_20200731_1323"), ] diff --git a/posthog/migrations/0080_update_dashboard_funnel_filters.py b/posthog/migrations/0080_update_dashboard_funnel_filters.py index db3f6771e8ff6..09bdafafbb262 100644 --- a/posthog/migrations/0080_update_dashboard_funnel_filters.py +++ b/posthog/migrations/0080_update_dashboard_funnel_filters.py @@ -37,7 +37,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0079_move_funnels_to_insights"), ] diff --git a/posthog/migrations/0081_person_is_identified.py b/posthog/migrations/0081_person_is_identified.py index d5b37b3255d6c..40443fd8b91ff 100644 --- a/posthog/migrations/0081_person_is_identified.py +++ b/posthog/migrations/0081_person_is_identified.py @@ 
-4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0080_update_dashboard_funnel_filters"), ] diff --git a/posthog/migrations/0082_personalapikey.py b/posthog/migrations/0082_personalapikey.py index ce4ad061c3928..95520b06dd452 100644 --- a/posthog/migrations/0082_personalapikey.py +++ b/posthog/migrations/0082_personalapikey.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0081_person_is_identified"), ] diff --git a/posthog/migrations/0083_auto_20200826_1504.py b/posthog/migrations/0083_auto_20200826_1504.py index aa0ba2b90251e..07ccc49fc93a9 100644 --- a/posthog/migrations/0083_auto_20200826_1504.py +++ b/posthog/migrations/0083_auto_20200826_1504.py @@ -15,7 +15,6 @@ def create_uuid(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0082_personalapikey"), ] diff --git a/posthog/migrations/0084_person_uuid.py b/posthog/migrations/0084_person_uuid.py index f32f3e14ae3a9..f6bd7480de94e 100644 --- a/posthog/migrations/0084_person_uuid.py +++ b/posthog/migrations/0084_person_uuid.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0083_auto_20200826_1504"), ] diff --git a/posthog/migrations/0085_org_models.py b/posthog/migrations/0085_org_models.py index 25e8b90aefd2c..7483d43a62394 100644 --- a/posthog/migrations/0085_org_models.py +++ b/posthog/migrations/0085_org_models.py @@ -51,7 +51,6 @@ def reverse_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0084_person_uuid"), ] diff --git a/posthog/migrations/0086_team_session_recording_opt_in.py b/posthog/migrations/0086_team_session_recording_opt_in.py index f26a94a383e7b..4fcd8c23c6d47 100644 --- a/posthog/migrations/0086_team_session_recording_opt_in.py +++ b/posthog/migrations/0086_team_session_recording_opt_in.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0085_org_models"), ] diff --git a/posthog/migrations/0087_fix_annotation_created_at.py b/posthog/migrations/0087_fix_annotation_created_at.py index 0c4483174d1c4..3b77ccea233ae 100644 --- a/posthog/migrations/0087_fix_annotation_created_at.py +++ b/posthog/migrations/0087_fix_annotation_created_at.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0086_team_session_recording_opt_in"), ] diff --git a/posthog/migrations/0088_toolbar_disabled.py b/posthog/migrations/0088_toolbar_disabled.py index 071ba75c8dcfd..64a2f05a274b5 100644 --- a/posthog/migrations/0088_toolbar_disabled.py +++ b/posthog/migrations/0088_toolbar_disabled.py @@ -15,7 +15,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0087_fix_annotation_created_at"), ] diff --git a/posthog/migrations/0089_auto_20201015_1031.py b/posthog/migrations/0089_auto_20201015_1031.py index 72b6195c5d0d7..f19ad04b047d2 100644 --- a/posthog/migrations/0089_auto_20201015_1031.py +++ b/posthog/migrations/0089_auto_20201015_1031.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0088_toolbar_disabled"), ] diff --git a/posthog/migrations/0090_org_live.py b/posthog/migrations/0090_org_live.py index 79dd818bd2d86..d63d3f545de00 100644 --- a/posthog/migrations/0090_org_live.py +++ b/posthog/migrations/0090_org_live.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0089_auto_20201015_1031"), ] @@ -29,14 +28,19 @@ class Migration(migrations.Migration): model_name="team", name="api_token", field=models.CharField( - default=posthog.models.utils.generate_random_token, max_length=200, null=True, unique=True + default=posthog.models.utils.generate_random_token, + max_length=200, + null=True, + unique=True, ), ), migrations.AlterField( model_name="team", name="users", field=models.ManyToManyField( - blank=True, related_name="teams_deprecated_relationship", 
to=settings.AUTH_USER_MODEL + blank=True, + related_name="teams_deprecated_relationship", + to=settings.AUTH_USER_MODEL, ), ), migrations.AlterField( diff --git a/posthog/migrations/0091_messagingrecord.py b/posthog/migrations/0091_messagingrecord.py index f00d2ee1da4b5..669c582ed6893 100644 --- a/posthog/migrations/0091_messagingrecord.py +++ b/posthog/migrations/0091_messagingrecord.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0090_org_live"), ] @@ -18,7 +17,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("email_hash", models.CharField(max_length=1024)), diff --git a/posthog/migrations/0093_remove_user_is_superuser.py b/posthog/migrations/0093_remove_user_is_superuser.py index 4b797ea341ab8..c56685c477ea8 100644 --- a/posthog/migrations/0093_remove_user_is_superuser.py +++ b/posthog/migrations/0093_remove_user_is_superuser.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0092_rename_projects_to_default"), ] diff --git a/posthog/migrations/0094_description_on_dashboard_items.py b/posthog/migrations/0094_description_on_dashboard_items.py index 612b57160c044..24dde3b3926f4 100644 --- a/posthog/migrations/0094_description_on_dashboard_items.py +++ b/posthog/migrations/0094_description_on_dashboard_items.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0093_remove_user_is_superuser"), ] diff --git a/posthog/migrations/0095_session_recording_event_table.py b/posthog/migrations/0095_session_recording_event_table.py index a972e792e3041..94b9d9848dc23 100644 --- a/posthog/migrations/0095_session_recording_event_table.py +++ b/posthog/migrations/0095_session_recording_event_table.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - 
dependencies = [ ("posthog", "0094_description_on_dashboard_items"), ] @@ -16,13 +15,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingEvent", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), - ("timestamp", models.DateTimeField(blank=True, default=django.utils.timezone.now)), + ( + "timestamp", + models.DateTimeField(blank=True, default=django.utils.timezone.now), + ), ("session_id", models.CharField(max_length=200)), ("distinct_id", models.CharField(max_length=200)), - ("snapshot_data", django.contrib.postgres.fields.jsonb.JSONField(default=dict)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")), + ( + "snapshot_data", + django.contrib.postgres.fields.jsonb.JSONField(default=dict), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"), + ), ], ), migrations.AddIndex( @@ -32,7 +48,8 @@ class Migration(migrations.Migration): migrations.AddIndex( model_name="sessionrecordingevent", index=models.Index( - fields=["team_id", "distinct_id", "timestamp", "session_id"], name="posthog_ses_team_id_46392f_idx" + fields=["team_id", "distinct_id", "timestamp", "session_id"], + name="posthog_ses_team_id_46392f_idx", ), ), ] diff --git a/posthog/migrations/0096_plugins.py b/posthog/migrations/0096_plugins.py index 765e13e122a65..8ae7167596371 100644 --- a/posthog/migrations/0096_plugins.py +++ b/posthog/migrations/0096_plugins.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0095_session_recording_event_table"), ] diff --git a/posthog/migrations/0097_invite_emails.py b/posthog/migrations/0097_invite_emails.py index f12ac859111ef..ff6aa476f5e30 100644 --- 
a/posthog/migrations/0097_invite_emails.py +++ b/posthog/migrations/0097_invite_emails.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0096_plugins"), ] diff --git a/posthog/migrations/0098_events_property_usage.py b/posthog/migrations/0098_events_property_usage.py index ed45de9c99645..99f56f0bb1387 100644 --- a/posthog/migrations/0098_events_property_usage.py +++ b/posthog/migrations/0098_events_property_usage.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0097_invite_emails"), ] diff --git a/posthog/migrations/0099_plugin_attachment.py b/posthog/migrations/0099_plugin_attachment.py index 6cb474f0a6bac..4f49ebf712858 100644 --- a/posthog/migrations/0099_plugin_attachment.py +++ b/posthog/migrations/0099_plugin_attachment.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0098_events_property_usage"), ] @@ -14,7 +13,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PluginAttachment", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("content_type", models.CharField(max_length=200)), ("file_name", models.CharField(max_length=200)), @@ -22,9 +29,19 @@ class Migration(migrations.Migration): ("contents", models.BinaryField()), ( "plugin_config", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.PluginConfig", + ), + ), + ( + "team", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.Team", + ), ), - ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")), ], ), ] diff --git 
a/posthog/migrations/0100_action_step_max_length.py b/posthog/migrations/0100_action_step_max_length.py index a14256c139273..f81ac84ae64d3 100644 --- a/posthog/migrations/0100_action_step_max_length.py +++ b/posthog/migrations/0100_action_step_max_length.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0099_plugin_attachment"), ] diff --git a/posthog/migrations/0101_org_owners.py b/posthog/migrations/0101_org_owners.py index f9e04e97fb4e1..93effd980a703 100644 --- a/posthog/migrations/0101_org_owners.py +++ b/posthog/migrations/0101_org_owners.py @@ -20,7 +20,6 @@ def make_owners_administrators_again(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0100_action_step_max_length"), ] @@ -36,8 +35,14 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="organizationmembership", constraint=models.UniqueConstraint( - condition=models.Q(level=15), fields=("organization_id",), name="only_one_owner_per_organization" + condition=models.Q(level=15), + fields=("organization_id",), + name="only_one_owner_per_organization", ), ), - migrations.RunPython(make_first_administrators_owners, make_owners_administrators_again, elidable=True), + migrations.RunPython( + make_first_administrators_owners, + make_owners_administrators_again, + elidable=True, + ), ] diff --git a/posthog/migrations/0102_dashboarditem_filters_hash.py b/posthog/migrations/0102_dashboarditem_filters_hash.py index 188859c466359..9d3d12c9b6e88 100644 --- a/posthog/migrations/0102_dashboarditem_filters_hash.py +++ b/posthog/migrations/0102_dashboarditem_filters_hash.py @@ -24,7 +24,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0101_org_owners"), ] diff --git a/posthog/migrations/0103_retention_remove_date.py b/posthog/migrations/0103_retention_remove_date.py index 45b20942c8a9b..ed00fec812576 100644 --- 
a/posthog/migrations/0103_retention_remove_date.py +++ b/posthog/migrations/0103_retention_remove_date.py @@ -7,7 +7,9 @@ def forward(apps, schema_editor): DashboardItem = apps.get_model("posthog", "DashboardItem") for item in DashboardItem.objects.filter( - filters__insight="RETENTION", filters__selectedDate__isnull=False, dashboard__isnull=False + filters__insight="RETENTION", + filters__selectedDate__isnull=False, + dashboard__isnull=False, ): item.filters.pop("selectedDate") item.save() @@ -18,7 +20,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0102_dashboarditem_filters_hash"), ] diff --git a/posthog/migrations/0104_auto_20201208_1052.py b/posthog/migrations/0104_auto_20201208_1052.py index ecda95ed3a63a..15a0f8e90b9d1 100644 --- a/posthog/migrations/0104_auto_20201208_1052.py +++ b/posthog/migrations/0104_auto_20201208_1052.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0103_retention_remove_date"), ] diff --git a/posthog/migrations/0105_cohort_errors_calculating.py b/posthog/migrations/0105_cohort_errors_calculating.py index 9fb142a129edc..d0254fa311159 100644 --- a/posthog/migrations/0105_cohort_errors_calculating.py +++ b/posthog/migrations/0105_cohort_errors_calculating.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0104_auto_20201208_1052"), ] diff --git a/posthog/migrations/0106_dashboard_item_type_to_display.py b/posthog/migrations/0106_dashboard_item_type_to_display.py index dd456b59717ae..8a47a57f3c167 100644 --- a/posthog/migrations/0106_dashboard_item_type_to_display.py +++ b/posthog/migrations/0106_dashboard_item_type_to_display.py @@ -23,7 +23,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0105_cohort_errors_calculating"), ] diff --git a/posthog/migrations/0107_plugin_source.py b/posthog/migrations/0107_plugin_source.py index 
3e24136c29984..b8d380da63624 100644 --- a/posthog/migrations/0107_plugin_source.py +++ b/posthog/migrations/0107_plugin_source.py @@ -15,7 +15,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0106_dashboard_item_type_to_display"), ] @@ -26,7 +25,12 @@ class Migration(migrations.Migration): name="plugin_type", field=models.CharField( blank=True, - choices=[("local", "local"), ("custom", "custom"), ("repository", "repository"), ("source", "source")], + choices=[ + ("local", "local"), + ("custom", "custom"), + ("repository", "repository"), + ("source", "source"), + ], default=None, max_length=200, null=True, diff --git a/posthog/migrations/0108_plugin_organization.py b/posthog/migrations/0108_plugin_organization.py index e66b63ca91f79..36a422017b66e 100644 --- a/posthog/migrations/0108_plugin_organization.py +++ b/posthog/migrations/0108_plugin_organization.py @@ -11,7 +11,6 @@ def set_plugin_organization(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0107_plugin_source"), ] diff --git a/posthog/migrations/0109_fix_retention_filters.py b/posthog/migrations/0109_fix_retention_filters.py index 087994342e5f2..b313aa87c7d00 100644 --- a/posthog/migrations/0109_fix_retention_filters.py +++ b/posthog/migrations/0109_fix_retention_filters.py @@ -17,7 +17,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0108_plugin_organization"), ] diff --git a/posthog/migrations/0111_plugin_storage.py b/posthog/migrations/0111_plugin_storage.py index 5d26fe5444f90..2016036bca333 100644 --- a/posthog/migrations/0111_plugin_storage.py +++ b/posthog/migrations/0111_plugin_storage.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0110_sessionrecordingeventbyteamandtimestamp"), ] @@ -14,19 +13,31 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PluginStorage", 
fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("value", models.TextField(blank=True, null=True)), ( "plugin_config", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.PluginConfig", + ), ), ], ), migrations.AddConstraint( model_name="pluginstorage", constraint=models.UniqueConstraint( - fields=("plugin_config_id", "key"), name="posthog_unique_plugin_storage_key" + fields=("plugin_config_id", "key"), + name="posthog_unique_plugin_storage_key", ), ), ] diff --git a/posthog/migrations/0112_sessions_filter.py b/posthog/migrations/0112_sessions_filter.py index e681e0ee54775..2667a1f5e04cb 100644 --- a/posthog/migrations/0112_sessions_filter.py +++ b/posthog/migrations/0112_sessions_filter.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0111_plugin_storage"), ] @@ -16,18 +15,35 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionsFilter", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(blank=True, max_length=400, null=False)), ("created_at", models.DateTimeField(auto_now_add=True)), ("updated_at", models.DateTimeField(auto_now=True)), - ("filters", django.contrib.postgres.fields.jsonb.JSONField(default=dict)), + ( + "filters", + django.contrib.postgres.fields.jsonb.JSONField(default=dict), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + 
null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"), + ), ], ), migrations.AddIndex( diff --git a/posthog/migrations/0113_cohort_is_static.py b/posthog/migrations/0113_cohort_is_static.py index cb76e16a26b8a..4e47813670209 100644 --- a/posthog/migrations/0113_cohort_is_static.py +++ b/posthog/migrations/0113_cohort_is_static.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0112_sessions_filter"), ] diff --git a/posthog/migrations/0114_fix_team_event_names.py b/posthog/migrations/0114_fix_team_event_names.py index 6b9143bf4c69c..a9803e62f9d4c 100644 --- a/posthog/migrations/0114_fix_team_event_names.py +++ b/posthog/migrations/0114_fix_team_event_names.py @@ -9,7 +9,9 @@ def fix_team_event_names(apps, schema_editor): old_event_names = team.event_names team.event_names = [event for event in old_event_names if isinstance(event, str)] if len(team.event_names) != len(old_event_names): - from posthog.tasks.calculate_event_property_usage import calculate_event_property_usage_for_team + from posthog.tasks.calculate_event_property_usage import ( + calculate_event_property_usage_for_team, + ) team.save() calculate_event_property_usage_for_team(team.pk) @@ -20,7 +22,6 @@ def backwards(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0113_cohort_is_static"), ] diff --git a/posthog/migrations/0115_session_recording_viewed.py b/posthog/migrations/0115_session_recording_viewed.py index af7c4adfffa86..695ce6592e144 100644 --- a/posthog/migrations/0115_session_recording_viewed.py +++ b/posthog/migrations/0115_session_recording_viewed.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0114_fix_team_event_names"), ] @@ -15,16 
+14,36 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingViewed", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), ("session_id", models.CharField(max_length=200)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team")), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.Team"), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddIndex( model_name="sessionrecordingviewed", - index=models.Index(fields=["team_id", "user_id", "session_id"], name="posthog_ses_team_id_465af1_idx"), + index=models.Index( + fields=["team_id", "user_id", "session_id"], + name="posthog_ses_team_id_465af1_idx", + ), ), migrations.AlterUniqueTogether( name="sessionrecordingviewed", diff --git a/posthog/migrations/0116_plugin_latest_tag.py b/posthog/migrations/0116_plugin_latest_tag.py index d9206f419e8c0..43bdaf4b6c293 100644 --- a/posthog/migrations/0116_plugin_latest_tag.py +++ b/posthog/migrations/0116_plugin_latest_tag.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0115_session_recording_viewed"), ] diff --git a/posthog/migrations/0116_session_recording_retention_period.py b/posthog/migrations/0116_session_recording_retention_period.py index 7c22507ebb464..fcba843db267f 100644 --- a/posthog/migrations/0116_session_recording_retention_period.py +++ b/posthog/migrations/0116_session_recording_retention_period.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ 
("posthog", "0115_session_recording_viewed"), ] diff --git a/posthog/migrations/0117_merge_20210126_0917.py b/posthog/migrations/0117_merge_20210126_0917.py index 58d4c58b420ed..215b295e771a2 100644 --- a/posthog/migrations/0117_merge_20210126_0917.py +++ b/posthog/migrations/0117_merge_20210126_0917.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0116_plugin_latest_tag"), ("posthog", "0116_session_recording_retention_period"), diff --git a/posthog/migrations/0118_is_demo.py b/posthog/migrations/0118_is_demo.py index ad156872b060d..a8ee78022e0ab 100644 --- a/posthog/migrations/0118_is_demo.py +++ b/posthog/migrations/0118_is_demo.py @@ -13,7 +13,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0117_merge_20210126_0917"), ] diff --git a/posthog/migrations/0119_mandatory_plugin_order.py b/posthog/migrations/0119_mandatory_plugin_order.py index 0d11fe4c5dba3..d357ac97b0c5d 100644 --- a/posthog/migrations/0119_mandatory_plugin_order.py +++ b/posthog/migrations/0119_mandatory_plugin_order.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0118_is_demo"), ] diff --git a/posthog/migrations/0120_organization_personalization.py b/posthog/migrations/0120_organization_personalization.py index dbba7585b1562..d8bbda6227122 100644 --- a/posthog/migrations/0120_organization_personalization.py +++ b/posthog/migrations/0120_organization_personalization.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0119_mandatory_plugin_order"), ] diff --git a/posthog/migrations/0122_organization_setup_section_2_completed.py b/posthog/migrations/0122_organization_setup_section_2_completed.py index 1f08e6c3eb76c..36d060caa991e 100644 --- a/posthog/migrations/0122_organization_setup_section_2_completed.py +++ b/posthog/migrations/0122_organization_setup_section_2_completed.py @@ -4,7 +4,6 @@ class 
Migration(migrations.Migration): - dependencies = [ ("posthog", "0121_person_email_index"), ] diff --git a/posthog/migrations/0123_organizationinvite_first_name.py b/posthog/migrations/0123_organizationinvite_first_name.py index f20482d789e85..a16a8e1f23030 100644 --- a/posthog/migrations/0123_organizationinvite_first_name.py +++ b/posthog/migrations/0123_organizationinvite_first_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0122_organization_setup_section_2_completed"), ] diff --git a/posthog/migrations/0124_unset_is_calculating_static_cohorts.py b/posthog/migrations/0124_unset_is_calculating_static_cohorts.py index ed1a3000d6a57..cb7a3bc8176dc 100644 --- a/posthog/migrations/0124_unset_is_calculating_static_cohorts.py +++ b/posthog/migrations/0124_unset_is_calculating_static_cohorts.py @@ -13,7 +13,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0123_organizationinvite_first_name"), ] diff --git a/posthog/migrations/0125_longer_webhook_url.py b/posthog/migrations/0125_longer_webhook_url.py index c87c15ae5438f..3ea0beab6aabc 100644 --- a/posthog/migrations/0125_longer_webhook_url.py +++ b/posthog/migrations/0125_longer_webhook_url.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0124_unset_is_calculating_static_cohorts"), ] diff --git a/posthog/migrations/0126_fix_funnels_insights_links.py b/posthog/migrations/0126_fix_funnels_insights_links.py index 074b95c34ebfa..5379cf8920a89 100644 --- a/posthog/migrations/0126_fix_funnels_insights_links.py +++ b/posthog/migrations/0126_fix_funnels_insights_links.py @@ -15,7 +15,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0125_longer_webhook_url"), ] diff --git a/posthog/migrations/0127_add_dashboard_filters.py b/posthog/migrations/0127_add_dashboard_filters.py index 73052b72778a5..1cd1b2d0bbc69 100644 --- 
a/posthog/migrations/0127_add_dashboard_filters.py +++ b/posthog/migrations/0127_add_dashboard_filters.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0126_fix_funnels_insights_links"), ] diff --git a/posthog/migrations/0127_stricter_team_data.py b/posthog/migrations/0127_stricter_team_data.py index 64d4d15703251..3a2e6a0ba733d 100644 --- a/posthog/migrations/0127_stricter_team_data.py +++ b/posthog/migrations/0127_stricter_team_data.py @@ -17,11 +17,14 @@ def adjust_teams_for_stricter_requirements(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0126_fix_funnels_insights_links"), ] operations = [ - migrations.RunPython(adjust_teams_for_stricter_requirements, migrations.RunPython.noop, elidable=True), + migrations.RunPython( + adjust_teams_for_stricter_requirements, + migrations.RunPython.noop, + elidable=True, + ), ] diff --git a/posthog/migrations/0128_stricter_team_schema.py b/posthog/migrations/0128_stricter_team_schema.py index c974080e643da..26f0210798515 100644 --- a/posthog/migrations/0128_stricter_team_schema.py +++ b/posthog/migrations/0128_stricter_team_schema.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0127_stricter_team_data"), ] diff --git a/posthog/migrations/0129_merge_20210223_0757.py b/posthog/migrations/0129_merge_20210223_0757.py index 919bfb5ada376..4f28607b0bec2 100644 --- a/posthog/migrations/0129_merge_20210223_0757.py +++ b/posthog/migrations/0129_merge_20210223_0757.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0128_stricter_team_schema"), ("posthog", "0127_add_dashboard_filters"), diff --git a/posthog/migrations/0130_dashboard_creation_mode.py b/posthog/migrations/0130_dashboard_creation_mode.py index a163da42b2e05..cf102ef97a5c9 100644 --- a/posthog/migrations/0130_dashboard_creation_mode.py +++ b/posthog/migrations/0130_dashboard_creation_mode.py @@ -4,7 +4,6 @@ 
class Migration(migrations.Migration): - dependencies = [ ("posthog", "0129_merge_20210223_0757"), ] @@ -14,7 +13,11 @@ class Migration(migrations.Migration): model_name="dashboard", name="creation_mode", field=models.CharField( - choices=[("default", "Default"), ("template", "Template"), ("duplicate", "Duplicate")], + choices=[ + ("default", "Default"), + ("template", "Template"), + ("duplicate", "Duplicate"), + ], default="default", max_length=16, ), diff --git a/posthog/migrations/0131_add_plugins_updated_created_at.py b/posthog/migrations/0131_add_plugins_updated_created_at.py index 873214abf6a73..1c3ef33ab1b0d 100644 --- a/posthog/migrations/0131_add_plugins_updated_created_at.py +++ b/posthog/migrations/0131_add_plugins_updated_created_at.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0130_dashboard_creation_mode"), ] diff --git a/posthog/migrations/0132_team_test_account_filters.py b/posthog/migrations/0132_team_test_account_filters.py index 339ac4ec5ceee..313de9f3355e4 100644 --- a/posthog/migrations/0132_team_test_account_filters.py +++ b/posthog/migrations/0132_team_test_account_filters.py @@ -40,7 +40,12 @@ def forward(apps, schema_editor): { "key": "$host", "operator": "is_not", - "value": ["localhost:8000", "localhost:5000", "127.0.0.1:8000", "127.0.0.1:3000"], + "value": [ + "localhost:8000", + "localhost:5000", + "127.0.0.1:8000", + "127.0.0.1:3000", + ], }, ] if team.organization: @@ -51,7 +56,12 @@ def forward(apps, schema_editor): example_email = re.search(r"@[\w.]+", example_emails[0]) if example_email: filters += [ - {"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"}, + { + "key": "email", + "operator": "not_icontains", + "value": example_email.group(), + "type": "person", + }, ] team.test_account_filters = filters team.save() @@ -62,7 +72,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0131_add_plugins_updated_created_at"), ] diff --git a/posthog/migrations/0133_plugins_access_control.py b/posthog/migrations/0133_plugins_access_control.py index 824853fdb0666..0e10347b18393 100644 --- a/posthog/migrations/0133_plugins_access_control.py +++ b/posthog/migrations/0133_plugins_access_control.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0132_team_test_account_filters"), ] @@ -33,7 +32,10 @@ class Migration(migrations.Migration): model_name="team", name="app_urls", field=fields.ArrayField( - base_field=models.CharField(max_length=200, null=True), blank=True, default=list, size=None + base_field=models.CharField(max_length=200, null=True), + blank=True, + default=list, + size=None, ), ), migrations.AlterField( diff --git a/posthog/migrations/0134_event_site_url.py b/posthog/migrations/0134_event_site_url.py index 0096edcf1b74f..c70de913a77b8 100644 --- a/posthog/migrations/0134_event_site_url.py +++ b/posthog/migrations/0134_event_site_url.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0133_plugins_access_control"), ] diff --git a/posthog/migrations/0135_plugins_on_cloud.py b/posthog/migrations/0135_plugins_on_cloud.py index 38afccfb7dfdf..bbf7165f956bf 100644 --- a/posthog/migrations/0135_plugins_on_cloud.py +++ b/posthog/migrations/0135_plugins_on_cloud.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0134_event_site_url"), ] diff --git a/posthog/migrations/0136_global_plugin_attachments.py b/posthog/migrations/0136_global_plugin_attachments.py index cd269c296886b..b0207e75bb878 100644 --- a/posthog/migrations/0136_global_plugin_attachments.py +++ b/posthog/migrations/0136_global_plugin_attachments.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0135_plugins_on_cloud"), ] @@ -14,6 +13,10 @@ class Migration(migrations.Migration): migrations.AlterField( model_name="pluginattachment", 
name="plugin_config", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.PluginConfig"), + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.PluginConfig", + ), ), ] diff --git a/posthog/migrations/0137_team_timezone.py b/posthog/migrations/0137_team_timezone.py index 09c9077a4172c..b7b31ff04f75c 100644 --- a/posthog/migrations/0137_team_timezone.py +++ b/posthog/migrations/0137_team_timezone.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0136_global_plugin_attachments"), ] @@ -72,13 +71,19 @@ class Migration(migrations.Migration): ("America/Anguilla", "America/Anguilla"), ("America/Antigua", "America/Antigua"), ("America/Araguaina", "America/Araguaina"), - ("America/Argentina/Buenos_Aires", "America/Argentina/Buenos_Aires"), + ( + "America/Argentina/Buenos_Aires", + "America/Argentina/Buenos_Aires", + ), ("America/Argentina/Catamarca", "America/Argentina/Catamarca"), ("America/Argentina/Cordoba", "America/Argentina/Cordoba"), ("America/Argentina/Jujuy", "America/Argentina/Jujuy"), ("America/Argentina/La_Rioja", "America/Argentina/La_Rioja"), ("America/Argentina/Mendoza", "America/Argentina/Mendoza"), - ("America/Argentina/Rio_Gallegos", "America/Argentina/Rio_Gallegos"), + ( + "America/Argentina/Rio_Gallegos", + "America/Argentina/Rio_Gallegos", + ), ("America/Argentina/Salta", "America/Argentina/Salta"), ("America/Argentina/San_Juan", "America/Argentina/San_Juan"), ("America/Argentina/San_Luis", "America/Argentina/San_Luis"), @@ -172,7 +177,10 @@ class Migration(migrations.Migration): ("America/Noronha", "America/Noronha"), ("America/North_Dakota/Beulah", "America/North_Dakota/Beulah"), ("America/North_Dakota/Center", "America/North_Dakota/Center"), - ("America/North_Dakota/New_Salem", "America/North_Dakota/New_Salem"), + ( + "America/North_Dakota/New_Salem", + "America/North_Dakota/New_Salem", + ), ("America/Nuuk", 
"America/Nuuk"), ("America/Ojinaga", "America/Ojinaga"), ("America/Panama", "America/Panama"), diff --git a/posthog/migrations/0138_featureflag_name_optional.py b/posthog/migrations/0138_featureflag_name_optional.py index 91cea1870b0f4..95e892cc419fb 100644 --- a/posthog/migrations/0138_featureflag_name_optional.py +++ b/posthog/migrations/0138_featureflag_name_optional.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0137_team_timezone"), ] diff --git a/posthog/migrations/0139_dashboard_tagging.py b/posthog/migrations/0139_dashboard_tagging.py index f16d90cd0933c..f5b1d24d1db8b 100644 --- a/posthog/migrations/0139_dashboard_tagging.py +++ b/posthog/migrations/0139_dashboard_tagging.py @@ -5,13 +5,16 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0138_featureflag_name_optional"), ] operations = [ - migrations.AddField(model_name="dashboard", name="description", field=models.TextField(blank=True)), + migrations.AddField( + model_name="dashboard", + name="description", + field=models.TextField(blank=True), + ), migrations.AddField( model_name="dashboard", name="tags", diff --git a/posthog/migrations/0140_team_data_attributes.py b/posthog/migrations/0140_team_data_attributes.py index 4d832106ebe5b..3a53999977c36 100644 --- a/posthog/migrations/0140_team_data_attributes.py +++ b/posthog/migrations/0140_team_data_attributes.py @@ -10,7 +10,6 @@ def set_default_data_attributes(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0139_dashboard_tagging"), ] diff --git a/posthog/migrations/0142_fix_team_data_attributes_default.py b/posthog/migrations/0142_fix_team_data_attributes_default.py index be1c263053aac..1e24b7d580234 100644 --- a/posthog/migrations/0142_fix_team_data_attributes_default.py +++ b/posthog/migrations/0142_fix_team_data_attributes_default.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0141_events_created_at_index"), ] diff --git a/posthog/migrations/0143_user_uuid.py b/posthog/migrations/0143_user_uuid.py index 484c78c1db1d3..8e3d2cb7ae384 100644 --- a/posthog/migrations/0143_user_uuid.py +++ b/posthog/migrations/0143_user_uuid.py @@ -17,7 +17,6 @@ def backwards(app, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0142_fix_team_data_attributes_default"), ] diff --git a/posthog/migrations/0144_update_django_3_1_8.py b/posthog/migrations/0144_update_django_3_1_8.py index a063dbfaf77d0..375a532b7456c 100644 --- a/posthog/migrations/0144_update_django_3_1_8.py +++ b/posthog/migrations/0144_update_django_3_1_8.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0143_user_uuid"), ] diff --git a/posthog/migrations/0145_eventdefinition_propertydefinition.py b/posthog/migrations/0145_eventdefinition_propertydefinition.py index 6ebf7328ae3e4..e97e52b591120 100644 --- a/posthog/migrations/0145_eventdefinition_propertydefinition.py +++ b/posthog/migrations/0145_eventdefinition_propertydefinition.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0144_update_django_3_1_8"), ] @@ -19,7 +18,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=400)), @@ -46,7 +48,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=400)), diff --git a/posthog/migrations/0146_eventproperty_sync.py b/posthog/migrations/0146_eventproperty_sync.py index a83f455f1bbf9..a53527c2ad2bf 100644 
--- a/posthog/migrations/0146_eventproperty_sync.py +++ b/posthog/migrations/0146_eventproperty_sync.py @@ -12,7 +12,6 @@ def sync_event_and_properties_definitions(team_uuid: str, Team, EventDefinition, PropertyDefinition) -> None: - team = None # It is possible that the team was deleted before the task could run @@ -64,11 +63,14 @@ def sync_team_event_names_and_properties(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0145_eventdefinition_propertydefinition"), ] operations = [ - migrations.RunPython(sync_team_event_names_and_properties, migrations.RunPython.noop, elidable=True), + migrations.RunPython( + sync_team_event_names_and_properties, + migrations.RunPython.noop, + elidable=True, + ), ] diff --git a/posthog/migrations/0147_fix_stickiness_dashboard_items.py b/posthog/migrations/0147_fix_stickiness_dashboard_items.py index 212aa8a489867..ba7954637f05b 100644 --- a/posthog/migrations/0147_fix_stickiness_dashboard_items.py +++ b/posthog/migrations/0147_fix_stickiness_dashboard_items.py @@ -11,7 +11,6 @@ def update_stickiness(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0146_eventproperty_sync"), ] diff --git a/posthog/migrations/0147_plugin_logs.py b/posthog/migrations/0147_plugin_logs.py index e5163836374b4..5d2844aa81c80 100644 --- a/posthog/migrations/0147_plugin_logs.py +++ b/posthog/migrations/0147_plugin_logs.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0146_eventproperty_sync"), ] @@ -20,7 +19,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("timestamp", models.DateTimeField(default=django.utils.timezone.now)), @@ -50,16 +52,28 @@ class Migration(migrations.Migration): ), ("message", 
models.TextField(db_index=True)), ("instance_id", models.UUIDField()), - ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")), + ( + "plugin", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"), + ), ( "plugin_config", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.pluginconfig"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.pluginconfig", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.AddIndex( model_name="pluginlogentry", - index=models.Index(fields=["plugin_config_id", "timestamp"], name="posthog_plu_plugin__736133_idx"), + index=models.Index( + fields=["plugin_config_id", "timestamp"], + name="posthog_plu_plugin__736133_idx", + ), ), ] diff --git a/posthog/migrations/0148_merge_20210506_0823.py b/posthog/migrations/0148_merge_20210506_0823.py index 3bb39d35e22a1..3e880cf836478 100644 --- a/posthog/migrations/0148_merge_20210506_0823.py +++ b/posthog/migrations/0148_merge_20210506_0823.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0147_plugin_logs"), ("posthog", "0147_fix_stickiness_dashboard_items"), diff --git a/posthog/migrations/0149_fix_lifecycle_dashboard_items.py b/posthog/migrations/0149_fix_lifecycle_dashboard_items.py index 1bd984dda628d..0890b104ec6f4 100644 --- a/posthog/migrations/0149_fix_lifecycle_dashboard_items.py +++ b/posthog/migrations/0149_fix_lifecycle_dashboard_items.py @@ -11,7 +11,6 @@ def update_lifecycle(apps, _): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0148_merge_20210506_0823"), ] diff --git a/posthog/migrations/0150_increase_element_varchars.py b/posthog/migrations/0150_increase_element_varchars.py index ad622bd0f727d..93f4df18194b4 100644 --- 
a/posthog/migrations/0150_increase_element_varchars.py +++ b/posthog/migrations/0150_increase_element_varchars.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0149_fix_lifecycle_dashboard_items"), ] diff --git a/posthog/migrations/0151_plugin_preinstalled.py b/posthog/migrations/0151_plugin_preinstalled.py index f008fcf5c506e..d42fc8ede9904 100644 --- a/posthog/migrations/0151_plugin_preinstalled.py +++ b/posthog/migrations/0151_plugin_preinstalled.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0150_increase_element_varchars"), ] diff --git a/posthog/migrations/0152_user_events_column_config.py b/posthog/migrations/0152_user_events_column_config.py index e543eb2dbe967..0b8a311f2f2e3 100644 --- a/posthog/migrations/0152_user_events_column_config.py +++ b/posthog/migrations/0152_user_events_column_config.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0151_plugin_preinstalled"), ] diff --git a/posthog/migrations/0153_plugin_capabilities.py b/posthog/migrations/0153_plugin_capabilities.py index 1543275859024..a5b14ba10f758 100644 --- a/posthog/migrations/0153_plugin_capabilities.py +++ b/posthog/migrations/0153_plugin_capabilities.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0152_user_events_column_config"), ] diff --git a/posthog/migrations/0154_organization_for_internal_metrics.py b/posthog/migrations/0154_organization_for_internal_metrics.py index 0e27e01bc65c3..af56792e73be0 100644 --- a/posthog/migrations/0154_organization_for_internal_metrics.py +++ b/posthog/migrations/0154_organization_for_internal_metrics.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0153_plugin_capabilities"), ] diff --git a/posthog/migrations/0155_organization_available_features.py b/posthog/migrations/0155_organization_available_features.py index 2b2fe5de8e3c6..d0bda03ae5a79 
100644 --- a/posthog/migrations/0155_organization_available_features.py +++ b/posthog/migrations/0155_organization_available_features.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0154_organization_for_internal_metrics"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="organization", name="available_features", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=64), blank=True, default=list, size=None + base_field=models.CharField(max_length=64), + blank=True, + default=list, + size=None, ), ), ] diff --git a/posthog/migrations/0157_plugin_metrics.py b/posthog/migrations/0157_plugin_metrics.py index 64ce3cdf26008..a33e2e9c3c993 100644 --- a/posthog/migrations/0157_plugin_metrics.py +++ b/posthog/migrations/0157_plugin_metrics.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0156_insight_short_id"), ] diff --git a/posthog/migrations/0158_new_token_format.py b/posthog/migrations/0158_new_token_format.py index d4118cb976040..165a7b9b2fcb5 100644 --- a/posthog/migrations/0158_new_token_format.py +++ b/posthog/migrations/0158_new_token_format.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0157_plugin_metrics"), ] @@ -17,7 +16,10 @@ class Migration(migrations.Migration): model_name="personalapikey", name="value", field=models.CharField( - default=posthog.models.utils.generate_random_token_personal, editable=False, max_length=50, unique=True + default=posthog.models.utils.generate_random_token_personal, + editable=False, + max_length=50, + unique=True, ), ), migrations.AlterField( diff --git a/posthog/migrations/0160_organization_domain_whitelist.py b/posthog/migrations/0160_organization_domain_whitelist.py index f277a8b3b4865..03307b3a5efc9 100644 --- a/posthog/migrations/0160_organization_domain_whitelist.py +++ b/posthog/migrations/0160_organization_domain_whitelist.py @@ -5,7 +5,6 
@@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0159_remove_funnels_with_breakdown"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="organization", name="domain_whitelist", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=256), blank=True, default=list, size=None + base_field=models.CharField(max_length=256), + blank=True, + default=list, + size=None, ), ), ] diff --git a/posthog/migrations/0161_property_defs_search.py b/posthog/migrations/0161_property_defs_search.py index 96dd9f6d9b713..5963799289e95 100644 --- a/posthog/migrations/0161_property_defs_search.py +++ b/posthog/migrations/0161_property_defs_search.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0160_organization_domain_whitelist"), ] @@ -15,10 +14,18 @@ class Migration(migrations.Migration): TrigramExtension(), migrations.AddIndex( model_name="eventdefinition", - index=GinIndex(fields=["name"], name="index_event_definition_name", opclasses=["gin_trgm_ops"]), + index=GinIndex( + fields=["name"], + name="index_event_definition_name", + opclasses=["gin_trgm_ops"], + ), ), migrations.AddIndex( model_name="propertydefinition", - index=GinIndex(fields=["name"], name="index_property_definition_name", opclasses=["gin_trgm_ops"]), + index=GinIndex( + fields=["name"], + name="index_property_definition_name", + opclasses=["gin_trgm_ops"], + ), ), ] diff --git a/posthog/migrations/0162_organization_is_member_join_email_enabled.py b/posthog/migrations/0162_organization_is_member_join_email_enabled.py index 1277559492597..0d632f1231b78 100644 --- a/posthog/migrations/0162_organization_is_member_join_email_enabled.py +++ b/posthog/migrations/0162_organization_is_member_join_email_enabled.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0161_property_defs_search"), ] diff --git a/posthog/migrations/0163_insights_favorited_updatedat_tags.py 
b/posthog/migrations/0163_insights_favorited_updatedat_tags.py index c5a40dcc94e66..2d84e5b65fa2c 100644 --- a/posthog/migrations/0163_insights_favorited_updatedat_tags.py +++ b/posthog/migrations/0163_insights_favorited_updatedat_tags.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0162_organization_is_member_join_email_enabled"), ] @@ -20,7 +19,10 @@ class Migration(migrations.Migration): model_name="dashboarditem", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + size=None, ), ), migrations.AddField( diff --git a/posthog/migrations/0165_dashboarditem_dive_dashboard.py b/posthog/migrations/0165_dashboarditem_dive_dashboard.py index a38a150f89772..6ac346a7cddb1 100644 --- a/posthog/migrations/0165_dashboarditem_dive_dashboard.py +++ b/posthog/migrations/0165_dashboarditem_dive_dashboard.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0164_person_index_by_team_and_id"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="dashboarditem", name="dive_dashboard", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard" + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.dashboard", ), ), ] diff --git a/posthog/migrations/0166_plugin_public_jobs.py b/posthog/migrations/0166_plugin_public_jobs.py index 082bd8e5be961..2a1ea2976b7e5 100644 --- a/posthog/migrations/0166_plugin_public_jobs.py +++ b/posthog/migrations/0166_plugin_public_jobs.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0165_dashboarditem_dive_dashboard"), ] diff --git a/posthog/migrations/0167_feature_flag_override.py b/posthog/migrations/0167_feature_flag_override.py index 
3dc9007b090a9..d8612997f9aea 100644 --- a/posthog/migrations/0167_feature_flag_override.py +++ b/posthog/migrations/0167_feature_flag_override.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0166_plugin_public_jobs"), ] @@ -15,20 +14,41 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlagOverride", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("override_value", models.JSONField()), ( "feature_flag", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.featureflag", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.AddConstraint( model_name="featureflagoverride", constraint=models.UniqueConstraint( - fields=("user", "feature_flag", "team"), name="unique feature flag for a user/team combo" + fields=("user", "feature_flag", "team"), + name="unique feature flag for a user/team combo", ), ), ] diff --git a/posthog/migrations/0169_person_properties_last_updated_at.py b/posthog/migrations/0169_person_properties_last_updated_at.py index cec9034911e9e..381bc7a25c81d 100644 --- a/posthog/migrations/0169_person_properties_last_updated_at.py +++ b/posthog/migrations/0169_person_properties_last_updated_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", 
"0168_action_step_empty_string_reset"), ] diff --git a/posthog/migrations/0170_project_based_permissioning.py b/posthog/migrations/0170_project_based_permissioning.py index 7f214bc84f48e..0f774199b0001 100644 --- a/posthog/migrations/0170_project_based_permissioning.py +++ b/posthog/migrations/0170_project_based_permissioning.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0169_person_properties_last_updated_at"), ] diff --git a/posthog/migrations/0171_cohort_description.py b/posthog/migrations/0171_cohort_description.py index 951f992947026..70f78f35e30a5 100644 --- a/posthog/migrations/0171_cohort_description.py +++ b/posthog/migrations/0171_cohort_description.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0170_project_based_permissioning"), ] diff --git a/posthog/migrations/0172_person_properties_last_operation.py b/posthog/migrations/0172_person_properties_last_operation.py index c2d8fbb5b6543..de91895bf629d 100644 --- a/posthog/migrations/0172_person_properties_last_operation.py +++ b/posthog/migrations/0172_person_properties_last_operation.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0171_cohort_description"), ] diff --git a/posthog/migrations/0173_should_update_person_props_function.py b/posthog/migrations/0173_should_update_person_props_function.py index 57c0b37fbe87e..dae88cbdfbf13 100644 --- a/posthog/migrations/0173_should_update_person_props_function.py +++ b/posthog/migrations/0173_should_update_person_props_function.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0172_person_properties_last_operation"), ] diff --git a/posthog/migrations/0174_organization_slug.py b/posthog/migrations/0174_organization_slug.py index 8d79133fb2b51..8bdd611d3b9e0 100644 --- a/posthog/migrations/0174_organization_slug.py +++ b/posthog/migrations/0174_organization_slug.py @@ -28,7 +28,6 @@ def 
slugify_all(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0173_should_update_person_props_function"), ] diff --git a/posthog/migrations/0175_should_update_person_props_function.py b/posthog/migrations/0175_should_update_person_props_function.py index b90a896e5c139..356028422d461 100644 --- a/posthog/migrations/0175_should_update_person_props_function.py +++ b/posthog/migrations/0175_should_update_person_props_function.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0174_organization_slug"), ] diff --git a/posthog/migrations/0176_update_person_props_function.py b/posthog/migrations/0176_update_person_props_function.py index 3f0f1f6e919ad..974fb90062026 100644 --- a/posthog/migrations/0176_update_person_props_function.py +++ b/posthog/migrations/0176_update_person_props_function.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0175_should_update_person_props_function"), ] diff --git a/posthog/migrations/0177_path_cleaning_filters.py b/posthog/migrations/0177_path_cleaning_filters.py index 3e731d7787655..adaf7819b135c 100644 --- a/posthog/migrations/0177_path_cleaning_filters.py +++ b/posthog/migrations/0177_path_cleaning_filters.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0176_update_person_props_function"), ] diff --git a/posthog/migrations/0178_rename_dashboard_item_to_insight.py b/posthog/migrations/0178_rename_dashboard_item_to_insight.py index 1c6bf777abf49..2cea98aedaa7c 100644 --- a/posthog/migrations/0178_rename_dashboard_item_to_insight.py +++ b/posthog/migrations/0178_rename_dashboard_item_to_insight.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0177_path_cleaning_filters"), ] diff --git a/posthog/migrations/0179_add_group_type_mapping.py b/posthog/migrations/0179_add_group_type_mapping.py index e977ee52079f9..47311ff1756ab 100644 --- 
a/posthog/migrations/0179_add_group_type_mapping.py +++ b/posthog/migrations/0179_add_group_type_mapping.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0178_rename_dashboard_item_to_insight"), ] @@ -14,10 +13,21 @@ class Migration(migrations.Migration): migrations.CreateModel( name="GroupTypeMapping", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("group_type", models.CharField(max_length=400)), ("group_type_index", models.IntegerField()), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( @@ -27,13 +37,15 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="grouptypemapping", constraint=models.UniqueConstraint( - fields=("team", "group_type_index"), name="unique event column indexes for team" + fields=("team", "group_type_index"), + name="unique event column indexes for team", ), ), migrations.AddConstraint( model_name="grouptypemapping", constraint=models.CheckConstraint( - check=models.Q(("group_type_index__lte", 5)), name="group_type_index is less than or equal 5" + check=models.Q(("group_type_index__lte", 5)), + name="group_type_index is less than or equal 5", ), ), ] diff --git a/posthog/migrations/0180_person_version.py b/posthog/migrations/0180_person_version.py index 46d008ccf5f35..d6d48d7643f78 100644 --- a/posthog/migrations/0180_person_version.py +++ b/posthog/migrations/0180_person_version.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0179_add_group_type_mapping"), ] diff --git a/posthog/migrations/0181_team_correlation_config.py 
b/posthog/migrations/0181_team_correlation_config.py index 5d8dc4f3a80bd..9bc7be124d8c6 100644 --- a/posthog/migrations/0181_team_correlation_config.py +++ b/posthog/migrations/0181_team_correlation_config.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0180_person_version"), ] diff --git a/posthog/migrations/0182_sessionrecordingevent_window_id.py b/posthog/migrations/0182_sessionrecordingevent_window_id.py index 1bfcb9b9b71a9..ba59120a0bbb9 100644 --- a/posthog/migrations/0182_sessionrecordingevent_window_id.py +++ b/posthog/migrations/0182_sessionrecordingevent_window_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0181_team_correlation_config"), ] diff --git a/posthog/migrations/0183_groups_pg.py b/posthog/migrations/0183_groups_pg.py index 2924c8a581c05..ff5d69ffb0cce 100644 --- a/posthog/migrations/0183_groups_pg.py +++ b/posthog/migrations/0183_groups_pg.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0182_sessionrecordingevent_window_id"), ] @@ -14,7 +13,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Group", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("group_key", models.CharField(max_length=400)), ("group_type_index", models.IntegerField()), ("group_properties", models.JSONField(default=dict)), @@ -22,7 +29,10 @@ class Migration(migrations.Migration): ("properties_last_updated_at", models.JSONField(default=dict)), ("properties_last_operation", models.JSONField(default=dict)), ("version", models.BigIntegerField()), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), 
migrations.AddConstraint( diff --git a/posthog/migrations/0184_delete_sessionsfilter.py b/posthog/migrations/0184_delete_sessionsfilter.py index 4d5c6d7fda3f1..44df92d321541 100644 --- a/posthog/migrations/0184_delete_sessionsfilter.py +++ b/posthog/migrations/0184_delete_sessionsfilter.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0183_groups_pg"), ] diff --git a/posthog/migrations/0185_special_migrations.py b/posthog/migrations/0185_special_migrations.py index ef779c7aafcde..3c95dcb6e2a40 100644 --- a/posthog/migrations/0185_special_migrations.py +++ b/posthog/migrations/0185_special_migrations.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0184_delete_sessionsfilter"), ] @@ -15,17 +14,29 @@ class Migration(migrations.Migration): fields=[ ("id", models.BigAutoField(primary_key=True, serialize=False)), ("name", models.CharField(max_length=50)), - ("description", models.CharField(blank=True, max_length=400, null=True)), + ( + "description", + models.CharField(blank=True, max_length=400, null=True), + ), ("progress", models.PositiveSmallIntegerField(default=0)), ("status", models.PositiveSmallIntegerField(default=0)), - ("current_operation_index", models.PositiveSmallIntegerField(default=0)), + ( + "current_operation_index", + models.PositiveSmallIntegerField(default=0), + ), ("current_query_id", models.CharField(default="", max_length=100)), ("celery_task_id", models.CharField(default="", max_length=100)), ("started_at", models.DateTimeField(blank=True, null=True)), ("finished_at", models.DateTimeField(blank=True, null=True)), ("last_error", models.TextField(blank=True, null=True)), - ("posthog_min_version", models.CharField(blank=True, max_length=20, null=True)), - ("posthog_max_version", models.CharField(blank=True, max_length=20, null=True)), + ( + "posthog_min_version", + models.CharField(blank=True, max_length=20, null=True), + ), + ( + "posthog_max_version", + 
models.CharField(blank=True, max_length=20, null=True), + ), ], ), migrations.AddConstraint( diff --git a/posthog/migrations/0186_insight_refresh_attempt.py b/posthog/migrations/0186_insight_refresh_attempt.py index 052ef134cbb68..2f643dfe7d070 100644 --- a/posthog/migrations/0186_insight_refresh_attempt.py +++ b/posthog/migrations/0186_insight_refresh_attempt.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0185_special_migrations"), ] diff --git a/posthog/migrations/0187_stale_events.py b/posthog/migrations/0187_stale_events.py index c7aad3faeec0e..3fa16bd4457fa 100644 --- a/posthog/migrations/0187_stale_events.py +++ b/posthog/migrations/0187_stale_events.py @@ -4,7 +4,6 @@ def set_created_at(apps, schema_editor): - try: from posthog.client import sync_execute except ImportError: @@ -32,7 +31,6 @@ def set_created_at(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0186_insight_refresh_attempt"), ] diff --git a/posthog/migrations/0188_person_distinct_id_version.py b/posthog/migrations/0188_person_distinct_id_version.py index 0d068def31b40..57f2ab732499d 100644 --- a/posthog/migrations/0188_person_distinct_id_version.py +++ b/posthog/migrations/0188_person_distinct_id_version.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0187_stale_events"), ] diff --git a/posthog/migrations/0189_alter_annotation_scope.py b/posthog/migrations/0189_alter_annotation_scope.py index 2b0bc6b7ce2f1..c8eb52286e0c5 100644 --- a/posthog/migrations/0189_alter_annotation_scope.py +++ b/posthog/migrations/0189_alter_annotation_scope.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0188_person_distinct_id_version"), ] @@ -14,7 +13,11 @@ class Migration(migrations.Migration): model_name="annotation", name="scope", field=models.CharField( - choices=[("dashboard_item", "insight"), ("project", "project"), ("organization", 
"organization")], + choices=[ + ("dashboard_item", "insight"), + ("project", "project"), + ("organization", "organization"), + ], default="dashboard_item", max_length=24, ), diff --git a/posthog/migrations/0190_experiment.py b/posthog/migrations/0190_experiment.py index cec03a5e882d5..071722ba18e6c 100644 --- a/posthog/migrations/0190_experiment.py +++ b/posthog/migrations/0190_experiment.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0189_alter_annotation_scope"), ] @@ -16,9 +15,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Experiment", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("name", models.CharField(max_length=400)), - ("description", models.CharField(blank=True, max_length=400, null=True)), + ( + "description", + models.CharField(blank=True, max_length=400, null=True), + ), ("filters", models.JSONField(default=dict)), ("parameters", models.JSONField(default=dict, null=True)), ("start_date", models.DateTimeField(null=True)), @@ -27,13 +37,22 @@ class Migration(migrations.Migration): ("updated_at", models.DateTimeField(auto_now=True)), ( "created_by", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), ), ( "feature_flag", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.featureflag", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), ] diff --git 
a/posthog/migrations/0191_rename_specialmigration_asyncmigration.py b/posthog/migrations/0191_rename_specialmigration_asyncmigration.py index 89455a47093bc..518b6cd3032a1 100644 --- a/posthog/migrations/0191_rename_specialmigration_asyncmigration.py +++ b/posthog/migrations/0191_rename_specialmigration_asyncmigration.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0190_experiment"), ] diff --git a/posthog/migrations/0192_event_properties.py b/posthog/migrations/0192_event_properties.py index d2d1c0afd023d..90cd831c338e0 100644 --- a/posthog/migrations/0192_event_properties.py +++ b/posthog/migrations/0192_event_properties.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0191_rename_specialmigration_asyncmigration"), ] @@ -15,7 +14,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="EventProperty", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("event", models.CharField(max_length=400)), ("property", models.CharField(max_length=400)), ( @@ -38,7 +45,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="eventproperty", constraint=models.UniqueConstraint( - fields=("team", "event", "property"), name="posthog_event_property_unique_team_event_property" + fields=("team", "event", "property"), + name="posthog_event_property_unique_team_event_property", ), ), ] diff --git a/posthog/migrations/0193_auto_20211222_0912.py b/posthog/migrations/0193_auto_20211222_0912.py index c5236c5300b02..a028cb6a7649f 100644 --- a/posthog/migrations/0193_auto_20211222_0912.py +++ b/posthog/migrations/0193_auto_20211222_0912.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0192_event_properties"), ] @@ -44,8 +43,14 @@ class 
Migration(migrations.Migration): constraint=models.CheckConstraint( check=models.Q( models.Q( - ("property_type__in", ["DateTime", "String", "Numeric", "Boolean"]), - ("property_type_format__in", ["unix_timestamp", "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD"]), + ( + "property_type__in", + ["DateTime", "String", "Numeric", "Boolean"], + ), + ( + "property_type_format__in", + ["unix_timestamp", "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD"], + ), ) ), name="property_type_and_format_are_valid", diff --git a/posthog/migrations/0194_set_property_type_for_time.py b/posthog/migrations/0194_set_property_type_for_time.py index 40a11c5cf10c9..452c46f18a144 100644 --- a/posthog/migrations/0194_set_property_type_for_time.py +++ b/posthog/migrations/0194_set_property_type_for_time.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0193_auto_20211222_0912"), ] diff --git a/posthog/migrations/0195_group_type_name.py b/posthog/migrations/0195_group_type_name.py index 4f818f3d3384c..c100c26b761f9 100644 --- a/posthog/migrations/0195_group_type_name.py +++ b/posthog/migrations/0195_group_type_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0194_set_property_type_for_time"), ] diff --git a/posthog/migrations/0196_update_property_types.py b/posthog/migrations/0196_update_property_types.py index 790cfed311ae3..230a536e0254c 100644 --- a/posthog/migrations/0196_update_property_types.py +++ b/posthog/migrations/0196_update_property_types.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0195_group_type_name"), ] @@ -39,7 +38,10 @@ class Migration(migrations.Migration): constraint=models.CheckConstraint( check=models.Q( models.Q( - ("property_type__in", ["DateTime", "String", "Numeric", "Boolean"]), + ( + "property_type__in", + ["DateTime", "String", "Numeric", "Boolean"], + ), ( "property_type_format__in", [ diff --git a/posthog/migrations/0197_plugin_is_stateless.py 
b/posthog/migrations/0197_plugin_is_stateless.py index 0b09f00404905..32d9018c89257 100644 --- a/posthog/migrations/0197_plugin_is_stateless.py +++ b/posthog/migrations/0197_plugin_is_stateless.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0196_update_property_types"), ] diff --git a/posthog/migrations/0198_async_migration_error.py b/posthog/migrations/0198_async_migration_error.py index 1fca61f08fca8..bd1932c772b76 100644 --- a/posthog/migrations/0198_async_migration_error.py +++ b/posthog/migrations/0198_async_migration_error.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0197_plugin_is_stateless"), ] @@ -23,7 +22,10 @@ class Migration(migrations.Migration): ("description", models.CharField(max_length=400)), ( "async_migration", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.asyncmigration"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.asyncmigration", + ), ), ], ), diff --git a/posthog/migrations/0199_update_experiment_model.py b/posthog/migrations/0199_update_experiment_model.py index dc1d1aa4cf5ff..eac2ce551d81c 100644 --- a/posthog/migrations/0199_update_experiment_model.py +++ b/posthog/migrations/0199_update_experiment_model.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0198_async_migration_error"), ] diff --git a/posthog/migrations/0200_insight_last_modified.py b/posthog/migrations/0200_insight_last_modified.py index 61335edd71dff..6b568c4381480 100644 --- a/posthog/migrations/0200_insight_last_modified.py +++ b/posthog/migrations/0200_insight_last_modified.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0199_update_experiment_model"), ] diff --git a/posthog/migrations/0201_remove_property_type_format_constraint.py b/posthog/migrations/0201_remove_property_type_format_constraint.py index 83435e998f337..4451b2d81b1b7 
100644 --- a/posthog/migrations/0201_remove_property_type_format_constraint.py +++ b/posthog/migrations/0201_remove_property_type_format_constraint.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0200_insight_last_modified"), ] diff --git a/posthog/migrations/0202_descriptions_for_action.py b/posthog/migrations/0202_descriptions_for_action.py index d446196299d02..cfda7fdf75f07 100644 --- a/posthog/migrations/0202_descriptions_for_action.py +++ b/posthog/migrations/0202_descriptions_for_action.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0201_remove_property_type_format_constraint"), ] diff --git a/posthog/migrations/0203_dashboard_permissions.py b/posthog/migrations/0203_dashboard_permissions.py index 58936560d68c2..b029b2aeb06f2 100644 --- a/posthog/migrations/0203_dashboard_permissions.py +++ b/posthog/migrations/0203_dashboard_permissions.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0202_descriptions_for_action"), ] diff --git a/posthog/migrations/0204_remove_duplicate_plugin_configs.py b/posthog/migrations/0204_remove_duplicate_plugin_configs.py index c076acc3ef8dd..a9be75e301fe4 100644 --- a/posthog/migrations/0204_remove_duplicate_plugin_configs.py +++ b/posthog/migrations/0204_remove_duplicate_plugin_configs.py @@ -23,7 +23,6 @@ def remove_duplicate_plugin_configs(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0203_dashboard_permissions"), ] diff --git a/posthog/migrations/0205_auto_20220204_1748.py b/posthog/migrations/0205_auto_20220204_1748.py index 8fc138fbd95ea..1b09b6736692b 100644 --- a/posthog/migrations/0205_auto_20220204_1748.py +++ b/posthog/migrations/0205_auto_20220204_1748.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0204_remove_duplicate_plugin_configs"), ] diff --git a/posthog/migrations/0206_global_tags_setup.py 
b/posthog/migrations/0206_global_tags_setup.py index f977c5106d110..f5927c2baea49 100644 --- a/posthog/migrations/0206_global_tags_setup.py +++ b/posthog/migrations/0206_global_tags_setup.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0205_auto_20220204_1748"), ] @@ -19,11 +18,17 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=255)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.CreateModel( @@ -32,7 +37,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -48,7 +56,9 @@ class Migration(migrations.Migration): ( "tag", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="tagged_items", to="posthog.tag" + on_delete=django.db.models.deletion.CASCADE, + related_name="tagged_items", + to="posthog.tag", ), ), ], @@ -56,7 +66,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="taggeditem", constraint=models.CheckConstraint( - check=models.Q(models.Q(("action__isnull", False)), _connector="OR"), name="exactly_one_related_object" + check=models.Q(models.Q(("action__isnull", False)), _connector="OR"), + name="exactly_one_related_object", ), ), migrations.AlterUniqueTogether( diff --git a/posthog/migrations/0207_cohort_count.py b/posthog/migrations/0207_cohort_count.py index 95e985f8c0ec0..794ed5add5ea7 100644 --- a/posthog/migrations/0207_cohort_count.py +++ 
b/posthog/migrations/0207_cohort_count.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0206_global_tags_setup"), ] diff --git a/posthog/migrations/0208_alter_plugin_updated_at.py b/posthog/migrations/0208_alter_plugin_updated_at.py index 81e0492d6d74d..2e30b98562a35 100644 --- a/posthog/migrations/0208_alter_plugin_updated_at.py +++ b/posthog/migrations/0208_alter_plugin_updated_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0207_cohort_count"), ] diff --git a/posthog/migrations/0209_plugin_logs_disabled.py b/posthog/migrations/0209_plugin_logs_disabled.py index 31b8423790ea4..72cbcbb80cbb4 100644 --- a/posthog/migrations/0209_plugin_logs_disabled.py +++ b/posthog/migrations/0209_plugin_logs_disabled.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0208_alter_plugin_updated_at"), ] diff --git a/posthog/migrations/0210_drop_update_person_functions.py b/posthog/migrations/0210_drop_update_person_functions.py index 6d70f4c0cf32b..c036799c5429f 100644 --- a/posthog/migrations/0210_drop_update_person_functions.py +++ b/posthog/migrations/0210_drop_update_person_functions.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0209_plugin_logs_disabled"), ] diff --git a/posthog/migrations/0211_async_migrations_errors_length.py b/posthog/migrations/0211_async_migrations_errors_length.py index 445b85183e7a9..a29e6bd862808 100644 --- a/posthog/migrations/0211_async_migrations_errors_length.py +++ b/posthog/migrations/0211_async_migrations_errors_length.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0210_drop_update_person_functions"), ] diff --git a/posthog/migrations/0212_alter_persondistinctid_team.py b/posthog/migrations/0212_alter_persondistinctid_team.py index 89995547f70db..3f0b7eb908fac 100644 --- a/posthog/migrations/0212_alter_persondistinctid_team.py +++ 
b/posthog/migrations/0212_alter_persondistinctid_team.py @@ -48,7 +48,9 @@ class Migration(migrations.Migration): model_name="persondistinctid", name="team", field=models.ForeignKey( - db_index=False, on_delete=django.db.models.deletion.CASCADE, to="posthog.team" + db_index=False, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", ), ) ], diff --git a/posthog/migrations/0213_deprecated_old_tags.py b/posthog/migrations/0213_deprecated_old_tags.py index d713f0aaa5f45..2efc99288852c 100644 --- a/posthog/migrations/0213_deprecated_old_tags.py +++ b/posthog/migrations/0213_deprecated_old_tags.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0212_alter_persondistinctid_team"), ] @@ -18,14 +17,22 @@ class Migration(migrations.Migration): model_name="dashboard", name="deprecated_tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + null=True, + size=None, ), ), migrations.AlterField( model_name="insight", name="deprecated_tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=list, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=list, + null=True, + size=None, ), ), migrations.RemoveConstraint( @@ -78,7 +85,16 @@ class Migration(migrations.Migration): ), migrations.AlterUniqueTogether( name="taggeditem", - unique_together={("tag", "dashboard", "insight", "event_definition", "property_definition", "action")}, + unique_together={ + ( + "tag", + "dashboard", + "insight", + "event_definition", + "property_definition", + "action", + ) + }, ), migrations.AddConstraint( model_name="taggeditem", diff --git a/posthog/migrations/0215_add_tags_back.py b/posthog/migrations/0215_add_tags_back.py index 0bd14fee6f5ac..bc66b0997cd60 100644 --- 
a/posthog/migrations/0215_add_tags_back.py +++ b/posthog/migrations/0215_add_tags_back.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0214_migrate_dashboard_insight_tags"), ] @@ -15,14 +14,22 @@ class Migration(migrations.Migration): model_name="dashboard", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=None, + null=True, + size=None, ), ), migrations.AddField( model_name="insight", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=32), blank=True, default=None, null=True, size=None + base_field=models.CharField(max_length=32), + blank=True, + default=None, + null=True, + size=None, ), ), ] diff --git a/posthog/migrations/0216_insight_placeholder_name.py b/posthog/migrations/0216_insight_placeholder_name.py index 43e0a93447550..7d5956a0cfbc6 100644 --- a/posthog/migrations/0216_insight_placeholder_name.py +++ b/posthog/migrations/0216_insight_placeholder_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0215_add_tags_back"), ] diff --git a/posthog/migrations/0217_team_primary_dashboard.py b/posthog/migrations/0217_team_primary_dashboard.py index 98a8f32763983..cb164adf85455 100644 --- a/posthog/migrations/0217_team_primary_dashboard.py +++ b/posthog/migrations/0217_team_primary_dashboard.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0216_insight_placeholder_name"), ] diff --git a/posthog/migrations/0219_migrate_tags_v2.py b/posthog/migrations/0219_migrate_tags_v2.py index 1895d3b86c59d..ecc4f4312a812 100644 --- a/posthog/migrations/0219_migrate_tags_v2.py +++ b/posthog/migrations/0219_migrate_tags_v2.py @@ -33,7 +33,11 @@ def forwards(apps, schema_editor): ) for insight_page in insight_paginator.page_range: - 
logger.info("insight_tag_batch_get_start", limit=batch_size, offset=(insight_page - 1) * batch_size) + logger.info( + "insight_tag_batch_get_start", + limit=batch_size, + offset=(insight_page - 1) * batch_size, + ) insights = iter(insight_paginator.get_page(insight_page)) for tags, team_id, insight_id in insights: unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "") @@ -55,13 +59,22 @@ def forwards(apps, schema_editor): ) for dashboard_page in dashboard_paginator.page_range: - logger.info("dashboard_tag_batch_get_start", limit=batch_size, offset=(dashboard_page - 1) * batch_size) + logger.info( + "dashboard_tag_batch_get_start", + limit=batch_size, + offset=(dashboard_page - 1) * batch_size, + ) dashboards = iter(dashboard_paginator.get_page(dashboard_page)) for tags, team_id, dashboard_id in dashboards: unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "") for tag in unique_tags: temp_tag = Tag(name=tag, team_id=team_id) - createables.append((temp_tag, TaggedItem(dashboard_id=dashboard_id, tag_id=temp_tag.id))) + createables.append( + ( + temp_tag, + TaggedItem(dashboard_id=dashboard_id, tag_id=temp_tag.id), + ) + ) logger.info("dashboard_tag_get_end", tags_count=len(createables) - num_insight_tags) @@ -94,7 +107,9 @@ def forwards(apps, schema_editor): # Create tag <-> item relationships, ignoring conflicts TaggedItem.objects.bulk_create( - [tagged_item for (_, tagged_item) in createable_batch], ignore_conflicts=True, batch_size=batch_size + [tagged_item for (_, tagged_item) in createable_batch], + ignore_conflicts=True, + batch_size=batch_size, ) logger.info("posthog/0219_migrate_tags_v2_end") diff --git a/posthog/migrations/0220_backfill_primary_dashboards.py b/posthog/migrations/0220_backfill_primary_dashboards.py index 4633c81b90bbf..f32def59bc25c 100644 --- a/posthog/migrations/0220_backfill_primary_dashboards.py +++ b/posthog/migrations/0220_backfill_primary_dashboards.py @@ -10,7 +10,6 @@ def 
backfill_primary_dashboards(apps, _): team_dashboards = [] with connection.cursor() as cursor: - # Fetch a list of teams and the id of the dashboard that should be set as the primary dashboard # The primary dashboard should be the oldest pinned dashboard, if one exists # or the oldest dashboard, if no pinned dashboards exist diff --git a/posthog/migrations/0221_add_activity_log_model.py b/posthog/migrations/0221_add_activity_log_model.py index dda3d6001c30a..077951fbfda0f 100644 --- a/posthog/migrations/0221_add_activity_log_model.py +++ b/posthog/migrations/0221_add_activity_log_model.py @@ -10,7 +10,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0220_backfill_primary_dashboards"), ] @@ -22,7 +21,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("team_id", models.PositiveIntegerField(null=True)), @@ -33,26 +35,36 @@ class Migration(migrations.Migration): ( "detail", models.JSONField( - encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, null=True + encoder=posthog.models.activity_logging.activity_log.ActivityDetailEncoder, + null=True, ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ( "user", models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ], ), migrations.AddIndex( model_name="activitylog", - index=models.Index(fields=["team_id", "scope", "item_id"], name="posthog_act_team_id_13a0a8_idx"), + index=models.Index( + fields=["team_id", "scope", "item_id"], + name="posthog_act_team_id_13a0a8_idx", + ), ), migrations.AddConstraint( model_name="activitylog", constraint=models.CheckConstraint( - 
check=models.Q(("team_id__isnull", False), ("organization_id__isnull", False), _connector="OR"), + check=models.Q( + ("team_id__isnull", False), + ("organization_id__isnull", False), + _connector="OR", + ), name="must_have_team_or_organization_id", ), ), diff --git a/posthog/migrations/0222_fix_deleted_primary_dashboards.py b/posthog/migrations/0222_fix_deleted_primary_dashboards.py index 7869597e61475..a65df9e39f2f7 100644 --- a/posthog/migrations/0222_fix_deleted_primary_dashboards.py +++ b/posthog/migrations/0222_fix_deleted_primary_dashboards.py @@ -15,7 +15,6 @@ def fix_for_deleted_primary_dashboards(apps, _): expected_team_dashboards = [] with connection.cursor() as cursor: - # Fetch a list of teams and the id of the dashboard that should be set as the primary dashboard # The primary dashboard should be the oldest pinned dashboard, if one exists # or the oldest dashboard, if no pinned dashboards exist diff --git a/posthog/migrations/0223_organizationdomain.py b/posthog/migrations/0223_organizationdomain.py index 30cef1aca53a0..c46349689ff69 100644 --- a/posthog/migrations/0223_organizationdomain.py +++ b/posthog/migrations/0223_organizationdomain.py @@ -15,12 +15,14 @@ def migrate_domain_whitelist(apps, schema_editor): for organization in Organization.objects.exclude(domain_whitelist=[]): for domain in organization.domain_whitelist: OrganizationDomain.objects.create( - organization=organization, domain=domain, verified_at=timezone.now(), jit_provisioning_enabled=True + organization=organization, + domain=domain, + verified_at=timezone.now(), + jit_provisioning_enabled=True, ) class Migration(migrations.Migration): - dependencies = [ ("posthog", "0222_fix_deleted_primary_dashboards"), ] @@ -32,18 +34,28 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), 
("domain", models.CharField(max_length=128, unique=True)), ( "verification_challenge", models.CharField( - default=posthog.models.organization_domain.generate_verification_challenge, max_length=128 + default=posthog.models.organization_domain.generate_verification_challenge, + max_length=128, ), ), - ("verified_at", models.DateTimeField(blank=True, default=None, null=True)), - ("last_verification_retry", models.DateTimeField(blank=True, default=None, null=True)), + ( + "verified_at", + models.DateTimeField(blank=True, default=None, null=True), + ), + ( + "last_verification_retry", + models.DateTimeField(blank=True, default=None, null=True), + ), ( "jit_provisioning_enabled", models.BooleanField(default=False), @@ -52,7 +64,9 @@ class Migration(migrations.Migration): ( "organization", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="domains", to="posthog.organization" + on_delete=django.db.models.deletion.CASCADE, + related_name="domains", + to="posthog.organization", ), ), ], diff --git a/posthog/migrations/0224_saml_multitenant.py b/posthog/migrations/0224_saml_multitenant.py index 2f942a4caf0d2..b2813acd95827 100644 --- a/posthog/migrations/0224_saml_multitenant.py +++ b/posthog/migrations/0224_saml_multitenant.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0223_organizationdomain"), ] diff --git a/posthog/migrations/0225_insight_viewed.py b/posthog/migrations/0225_insight_viewed.py index e0f1b58456766..7d4195a4fb681 100644 --- a/posthog/migrations/0225_insight_viewed.py +++ b/posthog/migrations/0225_insight_viewed.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0224_saml_multitenant"), ] @@ -15,16 +14,42 @@ class Migration(migrations.Migration): migrations.CreateModel( name="InsightViewed", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + 
auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("last_viewed_at", models.DateTimeField()), - ("insight", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.insight")), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "insight", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddIndex( model_name="insightviewed", - index=models.Index(fields=["team_id", "user_id", "-last_viewed_at"], name="posthog_ins_team_id_339ee0_idx"), + index=models.Index( + fields=["team_id", "user_id", "-last_viewed_at"], + name="posthog_ins_team_id_339ee0_idx", + ), ), migrations.AddConstraint( model_name="insightviewed", diff --git a/posthog/migrations/0226_longer_action_slack_message_format.py b/posthog/migrations/0226_longer_action_slack_message_format.py index 8f6a1968f2492..b6d0d5d448ecf 100644 --- a/posthog/migrations/0226_longer_action_slack_message_format.py +++ b/posthog/migrations/0226_longer_action_slack_message_format.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0225_insight_viewed"), ] diff --git a/posthog/migrations/0227_add_dashboard_tiles.py b/posthog/migrations/0227_add_dashboard_tiles.py index 88e2258ca455f..5ced1caad6326 100644 --- a/posthog/migrations/0227_add_dashboard_tiles.py +++ b/posthog/migrations/0227_add_dashboard_tiles.py @@ -58,7 +58,6 @@ def reverse(apps, _) -> None: class Migration(migrations.Migration): - dependencies = [ ("posthog", "0226_longer_action_slack_message_format"), ] @@ -67,9 +66,23 @@ class 
Migration(migrations.Migration): migrations.CreateModel( name="DashboardTile", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("dashboard", models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.dashboard")), - ("insight", models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.insight")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "dashboard", + models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.dashboard"), + ), + ( + "insight", + models.ForeignKey(on_delete=models.deletion.CASCADE, to="posthog.insight"), + ), ("layouts", models.JSONField(default=dict)), ("color", models.CharField(blank=True, max_length=400, null=True)), ], @@ -78,7 +91,10 @@ class Migration(migrations.Migration): model_name="dashboard", name="insights", field=models.ManyToManyField( - blank=True, related_name="dashboards", through="posthog.DashboardTile", to="posthog.Insight" + blank=True, + related_name="dashboards", + through="posthog.DashboardTile", + to="posthog.Insight", ), ), migrations.RunPython(migrate_dashboard_insight_relations, reverse, elidable=True), diff --git a/posthog/migrations/0228_fix_tile_layouts.py b/posthog/migrations/0228_fix_tile_layouts.py index bea976781fe8b..f819390449f04 100644 --- a/posthog/migrations/0228_fix_tile_layouts.py +++ b/posthog/migrations/0228_fix_tile_layouts.py @@ -42,7 +42,6 @@ def migrate_dashboard_insight_relations(apps, _) -> None: class Migration(migrations.Migration): - dependencies = [ ("posthog", "0227_add_dashboard_tiles"), ] diff --git a/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py b/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py index fdb078043a99a..7bbd818dbbafa 100644 --- a/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py +++ b/posthog/migrations/0229_add_filters_hash_to_dashboard_table.py @@ -4,7 +4,6 @@ class 
Migration(migrations.Migration): - dependencies = [ ("posthog", "0228_fix_tile_layouts"), ] diff --git a/posthog/migrations/0230_cohort_filters.py b/posthog/migrations/0230_cohort_filters.py index 96dff620cea4c..c695d413372eb 100644 --- a/posthog/migrations/0230_cohort_filters.py +++ b/posthog/migrations/0230_cohort_filters.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0229_add_filters_hash_to_dashboard_table"), ] diff --git a/posthog/migrations/0231_add_refreshing_data_to_tiles.py b/posthog/migrations/0231_add_refreshing_data_to_tiles.py index 90e4171ba8f65..18a31765beead 100644 --- a/posthog/migrations/0231_add_refreshing_data_to_tiles.py +++ b/posthog/migrations/0231_add_refreshing_data_to_tiles.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0230_cohort_filters"), ] diff --git a/posthog/migrations/0232_add_team_person_display_name_properties.py b/posthog/migrations/0232_add_team_person_display_name_properties.py index 1508ea860d7ed..56402418d2a46 100644 --- a/posthog/migrations/0232_add_team_person_display_name_properties.py +++ b/posthog/migrations/0232_add_team_person_display_name_properties.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0231_add_refreshing_data_to_tiles"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="team", name="person_display_name_properties", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=400), blank=True, null=True, size=None + base_field=models.CharField(max_length=400), + blank=True, + null=True, + size=None, ), ), ] diff --git a/posthog/migrations/0233_plugin_source_file.py b/posthog/migrations/0233_plugin_source_file.py index 2686bb99ca8bc..0e5a13827425a 100644 --- a/posthog/migrations/0233_plugin_source_file.py +++ b/posthog/migrations/0233_plugin_source_file.py @@ -33,7 +33,6 @@ def migrate_plugin_source(apps, schema_editor): class 
Migration(migrations.Migration): - dependencies = [ ("posthog", "0232_add_team_person_display_name_properties"), ] @@ -45,12 +44,18 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("filename", models.CharField(max_length=200)), ("source", models.TextField(blank=True, null=True)), - ("plugin", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin")), + ( + "plugin", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.plugin"), + ), ], ), migrations.AddConstraint( diff --git a/posthog/migrations/0234_create_plugin_jsons.py b/posthog/migrations/0234_create_plugin_jsons.py index 6d6e3420efbb5..36b12068613e3 100644 --- a/posthog/migrations/0234_create_plugin_jsons.py +++ b/posthog/migrations/0234_create_plugin_jsons.py @@ -27,7 +27,6 @@ def migrate_plugin_source(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0233_plugin_source_file"), ] diff --git a/posthog/migrations/0235_plugin_source_transpilation.py b/posthog/migrations/0235_plugin_source_transpilation.py index 767db8bd4fd6b..a657113cf57ce 100644 --- a/posthog/migrations/0235_plugin_source_transpilation.py +++ b/posthog/migrations/0235_plugin_source_transpilation.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0234_create_plugin_jsons"), ] @@ -20,7 +19,11 @@ class Migration(migrations.Migration): name="status", field=models.CharField( null=True, - choices=[("LOCKED", "locked"), ("TRANSPILED", "transpiled"), ("ERROR", "error")], + choices=[ + ("LOCKED", "locked"), + ("TRANSPILED", "transpiled"), + ("ERROR", "error"), + ], max_length=20, ), ), diff --git a/posthog/migrations/0236_add_instance_setting_model.py b/posthog/migrations/0236_add_instance_setting_model.py index 
b42aa22cba4da..f41fae6f6f5fd 100644 --- a/posthog/migrations/0236_add_instance_setting_model.py +++ b/posthog/migrations/0236_add_instance_setting_model.py @@ -33,7 +33,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="InstanceSetting", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=128)), ("raw_value", models.CharField(blank=True, max_length=1024)), ], diff --git a/posthog/migrations/0237_remove_timezone_from_teams.py b/posthog/migrations/0237_remove_timezone_from_teams.py index 4bd8f99cf24c4..e4ff58e555d6d 100644 --- a/posthog/migrations/0237_remove_timezone_from_teams.py +++ b/posthog/migrations/0237_remove_timezone_from_teams.py @@ -8,7 +8,6 @@ def reset_team_timezone_to_UTC(apps, _) -> None: class Migration(migrations.Migration): - dependencies = [ ("posthog", "0236_add_instance_setting_model"), ] diff --git a/posthog/migrations/0238_exportedasset.py b/posthog/migrations/0238_exportedasset.py index 92eef7b7faefa..f1ace97313135 100644 --- a/posthog/migrations/0238_exportedasset.py +++ b/posthog/migrations/0238_exportedasset.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0237_remove_timezone_from_teams"), ] @@ -16,7 +15,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="ExportedAsset", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ( "export_format", models.CharField( @@ -41,13 +48,24 @@ class Migration(migrations.Migration): ), ( "dashboard", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + models.ForeignKey( + 
null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), ( "insight", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), ] diff --git a/posthog/migrations/0239_delete_postgres_pluginlogentry.py b/posthog/migrations/0239_delete_postgres_pluginlogentry.py index 34dc7a34ca8c8..4161c47e28212 100644 --- a/posthog/migrations/0239_delete_postgres_pluginlogentry.py +++ b/posthog/migrations/0239_delete_postgres_pluginlogentry.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0238_exportedasset"), ] diff --git a/posthog/migrations/0240_organizationinvite_message.py b/posthog/migrations/0240_organizationinvite_message.py index 17cf1a9c22a06..cefc8aa46b5f9 100644 --- a/posthog/migrations/0240_organizationinvite_message.py +++ b/posthog/migrations/0240_organizationinvite_message.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0239_delete_postgres_pluginlogentry"), ] diff --git a/posthog/migrations/0241_subscription.py b/posthog/migrations/0241_subscription.py index 5c85060da6553..8d00dbef43f2d 100644 --- a/posthog/migrations/0241_subscription.py +++ b/posthog/migrations/0241_subscription.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0240_organizationinvite_message"), ] @@ -16,9 +15,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Subscription", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + 
serialize=False, + verbose_name="ID", + ), + ), ("title", models.CharField(blank=True, max_length=100, null=True)), - ("target_type", models.CharField(choices=[("email", "Email")], max_length=10)), + ( + "target_type", + models.CharField(choices=[("email", "Email")], max_length=10), + ), ("target_value", models.TextField()), ( "frequency", @@ -64,18 +74,32 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( "dashboard", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), ( "insight", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), ] diff --git a/posthog/migrations/0242_team_live_events_columns.py b/posthog/migrations/0242_team_live_events_columns.py index 8dee3fe2fd900..e29c482738406 100644 --- a/posthog/migrations/0242_team_live_events_columns.py +++ b/posthog/migrations/0242_team_live_events_columns.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0241_subscription"), ] diff --git a/posthog/migrations/0243_unpack_plugin_source_files.py b/posthog/migrations/0243_unpack_plugin_source_files.py index d0e10ef482302..58aac54e753a9 100644 --- a/posthog/migrations/0243_unpack_plugin_source_files.py +++ b/posthog/migrations/0243_unpack_plugin_source_files.py @@ -56,7 +56,9 @@ 
def sync_from_plugin_archive(plugin): ) else: logger.debug( - "Migration 0243 - extracted and saved code of plugin.", plugin=plugin.name, plugin_id=plugin.id + "Migration 0243 - extracted and saved code of plugin.", + plugin=plugin.name, + plugin_id=plugin.id, ) logger.info("Migration 0243 - finished") @@ -70,7 +72,6 @@ def reverse_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0242_team_live_events_columns"), ] diff --git a/posthog/migrations/0244_drop_should_update_person_prop.py b/posthog/migrations/0244_drop_should_update_person_prop.py index 40dc0f451e916..210e46ce4d36d 100644 --- a/posthog/migrations/0244_drop_should_update_person_prop.py +++ b/posthog/migrations/0244_drop_should_update_person_prop.py @@ -2,7 +2,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0243_unpack_plugin_source_files"), ] diff --git a/posthog/migrations/0245_silence_deprecated_tags_warnings.py b/posthog/migrations/0245_silence_deprecated_tags_warnings.py index f39278900d75c..56c2a042db092 100644 --- a/posthog/migrations/0245_silence_deprecated_tags_warnings.py +++ b/posthog/migrations/0245_silence_deprecated_tags_warnings.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0244_drop_should_update_person_prop"), ] diff --git a/posthog/migrations/0246_integrations.py b/posthog/migrations/0246_integrations.py index d80dd3e6040d7..9bc1f0cb4b6eb 100644 --- a/posthog/migrations/0246_integrations.py +++ b/posthog/migrations/0246_integrations.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0245_silence_deprecated_tags_warnings"), ] @@ -16,13 +15,26 @@ class Migration(migrations.Migration): model_name="subscription", name="target_type", field=models.CharField( - choices=[("email", "Email"), ("slack", "Slack"), ("webhook", "Webhook")], max_length=10 + choices=[ + ("email", "Email"), + ("slack", "Slack"), + ("webhook", "Webhook"), + ], + 
max_length=10, ), ), migrations.CreateModel( name="Integration", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("kind", models.CharField(choices=[("slack", "Slack")], max_length=10)), ("config", models.JSONField(default=dict)), ("sensitive_config", models.JSONField(default=dict)), @@ -31,10 +43,16 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), ] diff --git a/posthog/migrations/0247_feature_flags_experience_continuity.py b/posthog/migrations/0247_feature_flags_experience_continuity.py index 31a5c471c1fdb..f23365acf60fe 100644 --- a/posthog/migrations/0247_feature_flags_experience_continuity.py +++ b/posthog/migrations/0247_feature_flags_experience_continuity.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0246_integrations"), ] @@ -19,17 +18,32 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlagHashKeyOverride", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("feature_flag_key", models.CharField(max_length=400)), ("hash_key", models.CharField(max_length=400)), - ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")), - ("team", 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "person", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( model_name="featureflaghashkeyoverride", constraint=models.UniqueConstraint( - fields=("team", "person", "feature_flag_key"), name="Unique hash_key for a user/team/feature_flag combo" + fields=("team", "person", "feature_flag_key"), + name="Unique hash_key for a user/team/feature_flag combo", ), ), ] diff --git a/posthog/migrations/0248_add_context_for_csv_exports.py b/posthog/migrations/0248_add_context_for_csv_exports.py index ad5891165b54e..9b86fee68a8f2 100644 --- a/posthog/migrations/0248_add_context_for_csv_exports.py +++ b/posthog/migrations/0248_add_context_for_csv_exports.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0247_feature_flags_experience_continuity"), ] diff --git a/posthog/migrations/0249_add_sharingconfiguration.py b/posthog/migrations/0249_add_sharingconfiguration.py index 8ee425789c583..ad3f6ccf209f3 100644 --- a/posthog/migrations/0249_add_sharingconfiguration.py +++ b/posthog/migrations/0249_add_sharingconfiguration.py @@ -29,7 +29,6 @@ def reverse(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0248_add_context_for_csv_exports"), ] @@ -38,7 +37,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SharingConfiguration", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True)), ("enabled", models.BooleanField(default=False)), ( @@ -53,13 +60,24 @@ class 
Migration(migrations.Migration): ), ( "dashboard", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), ), ( "insight", - models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.insight"), + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.insight", + ), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), ], ), migrations.RunPython(create_sharing_configurations, reverse, elidable=True), diff --git a/posthog/migrations/0250_exportedasset_created_by.py b/posthog/migrations/0250_exportedasset_created_by.py index e0f21ea5a447e..72a9984ab53fd 100644 --- a/posthog/migrations/0250_exportedasset_created_by.py +++ b/posthog/migrations/0250_exportedasset_created_by.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0249_add_sharingconfiguration"), ] @@ -16,7 +15,10 @@ class Migration(migrations.Migration): model_name="exportedasset", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ] diff --git a/posthog/migrations/0251_event_buffer.py b/posthog/migrations/0251_event_buffer.py index af6b8f3b105b6..c0323f3057ce8 100644 --- a/posthog/migrations/0251_event_buffer.py +++ b/posthog/migrations/0251_event_buffer.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0250_exportedasset_created_by"), ] @@ -13,7 +12,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="EventBuffer", fields=[ - ("id", 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("event", models.JSONField(null=True, blank=True)), ("process_at", models.DateTimeField()), ("locked", models.BooleanField()), diff --git a/posthog/migrations/0252_reset_insight_refreshing_status.py b/posthog/migrations/0252_reset_insight_refreshing_status.py index 140128632f575..abfef85709b30 100644 --- a/posthog/migrations/0252_reset_insight_refreshing_status.py +++ b/posthog/migrations/0252_reset_insight_refreshing_status.py @@ -16,7 +16,6 @@ def reverse(_apps, _schema_editor) -> None: class Migration(migrations.Migration): - dependencies = [ ("posthog", "0251_event_buffer"), ] diff --git a/posthog/migrations/0253_add_async_migration_parameters.py b/posthog/migrations/0253_add_async_migration_parameters.py index a81e43700dee4..d96735c67c483 100644 --- a/posthog/migrations/0253_add_async_migration_parameters.py +++ b/posthog/migrations/0253_add_async_migration_parameters.py @@ -10,7 +10,6 @@ def describe(self): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0252_reset_insight_refreshing_status"), ] diff --git a/posthog/migrations/0254_prompt_sequence_state.py b/posthog/migrations/0254_prompt_sequence_state.py index 94b920a964a8a..e10e4a2530b4d 100644 --- a/posthog/migrations/0254_prompt_sequence_state.py +++ b/posthog/migrations/0254_prompt_sequence_state.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0253_add_async_migration_parameters"), ] @@ -15,20 +14,38 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PromptSequenceState", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", 
models.CharField(max_length=400)), - ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_updated_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ("step", models.IntegerField(default=0)), ("completed", models.BooleanField(default=False)), ("dismissed", models.BooleanField(default=False)), - ("person", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person")), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "person", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.person"), + ), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( model_name="promptsequencestate", constraint=models.UniqueConstraint( - fields=("team", "person", "key"), name="unique sequence key for person for team" + fields=("team", "person", "key"), + name="unique sequence key for person for team", ), ), ] diff --git a/posthog/migrations/0255_user_prompt_sequence_state.py b/posthog/migrations/0255_user_prompt_sequence_state.py index 9c1d2ec48f7fe..ebfe3dc5e6363 100644 --- a/posthog/migrations/0255_user_prompt_sequence_state.py +++ b/posthog/migrations/0255_user_prompt_sequence_state.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0254_prompt_sequence_state"), ] @@ -16,13 +15,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="UserPromptSequenceState", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=400)), - ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_updated_at", + models.DateTimeField(default=django.utils.timezone.now), + ), 
("step", models.IntegerField(default=0)), ("completed", models.BooleanField(default=False)), ("dismissed", models.BooleanField(default=False)), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.AddConstraint( diff --git a/posthog/migrations/0256_add_async_deletion_model.py b/posthog/migrations/0256_add_async_deletion_model.py index dcc035f70bc08..636fe554b15d5 100644 --- a/posthog/migrations/0256_add_async_deletion_model.py +++ b/posthog/migrations/0256_add_async_deletion_model.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0255_user_prompt_sequence_state"), ] @@ -24,10 +23,15 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddIndex( @@ -45,7 +49,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="asyncdeletion", constraint=models.UniqueConstraint( - fields=("deletion_type", "key", "group_type_index"), name="unique deletion for groups" + fields=("deletion_type", "key", "group_type_index"), + name="unique deletion for groups", ), ), ] diff --git a/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py b/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py index ccf6d725f15a3..b513fb4713dd5 100644 --- a/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py +++ 
b/posthog/migrations/0257_add_default_checked_for_test_filters_on_team.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0256_add_async_deletion_model"), ] diff --git a/posthog/migrations/0258_team_recording_domains.py b/posthog/migrations/0258_team_recording_domains.py index 510cf5500bb26..9ae7931f1048a 100644 --- a/posthog/migrations/0258_team_recording_domains.py +++ b/posthog/migrations/0258_team_recording_domains.py @@ -3,7 +3,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0257_add_default_checked_for_test_filters_on_team"), ] @@ -13,7 +12,10 @@ class Migration(migrations.Migration): model_name="team", name="recording_domains", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=200, null=True), blank=True, null=True, size=None + base_field=models.CharField(max_length=200, null=True), + blank=True, + null=True, + size=None, ), ), ] diff --git a/posthog/migrations/0259_backfill_team_recording_domains.py b/posthog/migrations/0259_backfill_team_recording_domains.py index 8589dcc83de02..1f0dcba4f08f8 100644 --- a/posthog/migrations/0259_backfill_team_recording_domains.py +++ b/posthog/migrations/0259_backfill_team_recording_domains.py @@ -45,7 +45,6 @@ def reverse(apps, _): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0258_team_recording_domains"), ] diff --git a/posthog/migrations/0260_pak_v2.py b/posthog/migrations/0260_pak_v2.py index abd6b490b303c..02fbd842ed39a 100644 --- a/posthog/migrations/0260_pak_v2.py +++ b/posthog/migrations/0260_pak_v2.py @@ -20,7 +20,6 @@ def hash_all_keys(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0259_backfill_team_recording_domains"), ] diff --git a/posthog/migrations/0261_team_capture_console_log_opt_in.py b/posthog/migrations/0261_team_capture_console_log_opt_in.py index 9bca3ca244582..92f202606cea9 100644 --- 
a/posthog/migrations/0261_team_capture_console_log_opt_in.py +++ b/posthog/migrations/0261_team_capture_console_log_opt_in.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0260_pak_v2"), ] diff --git a/posthog/migrations/0262_track_viewed_notifications.py b/posthog/migrations/0262_track_viewed_notifications.py index 441d6472514f2..a333ce35688ea 100644 --- a/posthog/migrations/0262_track_viewed_notifications.py +++ b/posthog/migrations/0262_track_viewed_notifications.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0261_team_capture_console_log_opt_in"), ] @@ -20,14 +19,19 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("last_viewed_activity_date", models.DateTimeField(default=None)), ( "user", models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ], diff --git a/posthog/migrations/0263_plugin_config_web_token.py b/posthog/migrations/0263_plugin_config_web_token.py index 1600ef55c4f5d..79aabbaf0d3c9 100644 --- a/posthog/migrations/0263_plugin_config_web_token.py +++ b/posthog/migrations/0263_plugin_config_web_token.py @@ -14,7 +14,6 @@ def forwards_func(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0262_track_viewed_notifications"), ] diff --git a/posthog/migrations/0264_user_partial_notification_settings.py b/posthog/migrations/0264_user_partial_notification_settings.py index 53984cb854b21..c8f8b9c727fd8 100644 --- a/posthog/migrations/0264_user_partial_notification_settings.py +++ b/posthog/migrations/0264_user_partial_notification_settings.py @@ -4,7 +4,6 @@ class 
Migration(migrations.Migration): - dependencies = [ ("posthog", "0263_plugin_config_web_token"), ] diff --git a/posthog/migrations/0265_related_tiles.py b/posthog/migrations/0265_related_tiles.py index a161e58ec4d6e..55a5958054c00 100644 --- a/posthog/migrations/0265_related_tiles.py +++ b/posthog/migrations/0265_related_tiles.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0264_user_partial_notification_settings"), ] @@ -16,7 +15,9 @@ class Migration(migrations.Migration): model_name="dashboardtile", name="dashboard", field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="tiles", to="posthog.dashboard" + on_delete=django.db.models.deletion.CASCADE, + related_name="tiles", + to="posthog.dashboard", ), ), ] diff --git a/posthog/migrations/0266_add_is_system_field_to_activity_log.py b/posthog/migrations/0266_add_is_system_field_to_activity_log.py index 539307c29a4db..b6716d09b3c55 100644 --- a/posthog/migrations/0266_add_is_system_field_to_activity_log.py +++ b/posthog/migrations/0266_add_is_system_field_to_activity_log.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0265_related_tiles"), ] diff --git a/posthog/migrations/0267_add_text_tiles.py b/posthog/migrations/0267_add_text_tiles.py index 808f7020e6500..b33ffea3d4905 100644 --- a/posthog/migrations/0267_add_text_tiles.py +++ b/posthog/migrations/0267_add_text_tiles.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0266_add_is_system_field_to_activity_log"), ] @@ -16,9 +15,20 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Text", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("body", models.CharField(blank=True, max_length=4000, null=True)), - 
("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ], ), # allow null and add related name to the field @@ -36,7 +46,10 @@ class Migration(migrations.Migration): model_name="text", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( @@ -78,7 +91,9 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="dashboardtile", constraint=models.UniqueConstraint( - condition=models.Q(("text__isnull", False)), fields=("dashboard", "text"), name="unique_dashboard_text" + condition=models.Q(("text__isnull", False)), + fields=("dashboard", "text"), + name="unique_dashboard_text", ), ), # can't have both insight and text on a tile diff --git a/posthog/migrations/0268_plugin_source_file_updated_at.py b/posthog/migrations/0268_plugin_source_file_updated_at.py index 29bb3d78e98c0..c9e23fd601222 100644 --- a/posthog/migrations/0268_plugin_source_file_updated_at.py +++ b/posthog/migrations/0268_plugin_source_file_updated_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0267_add_text_tiles"), ] diff --git a/posthog/migrations/0269_soft_delete_tiles.py b/posthog/migrations/0269_soft_delete_tiles.py index fd12b437a9b90..6c02e4aba196a 100644 --- a/posthog/migrations/0269_soft_delete_tiles.py +++ b/posthog/migrations/0269_soft_delete_tiles.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0268_plugin_source_file_updated_at"), ] diff --git a/posthog/migrations/0270_add_uploaded_media.py b/posthog/migrations/0270_add_uploaded_media.py index eac6c04e6c5d5..4b202c85757e8 100644 --- a/posthog/migrations/0270_add_uploaded_media.py +++ 
b/posthog/migrations/0270_add_uploaded_media.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0269_soft_delete_tiles"), ] @@ -20,20 +19,35 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("created_at", models.DateTimeField(auto_now_add=True)), - ("media_location", models.TextField(blank=True, max_length=1000, null=True)), - ("content_type", models.TextField(blank=True, max_length=100, null=True)), + ( + "media_location", + models.TextField(blank=True, max_length=1000, null=True), + ), + ( + "content_type", + models.TextField(blank=True, max_length=100, null=True), + ), ("file_name", models.TextField(blank=True, max_length=1000, null=True)), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "abstract": False, diff --git a/posthog/migrations/0271_delete_promptsequencestate.py b/posthog/migrations/0271_delete_promptsequencestate.py index cd1df66344aa7..479482c1981b5 100644 --- a/posthog/migrations/0271_delete_promptsequencestate.py +++ b/posthog/migrations/0271_delete_promptsequencestate.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0270_add_uploaded_media"), ] diff --git a/posthog/migrations/0272_alter_organization_plugins_access_level.py b/posthog/migrations/0272_alter_organization_plugins_access_level.py index 13f56d6f21b90..5f7fee6db4de3 100644 --- 
a/posthog/migrations/0272_alter_organization_plugins_access_level.py +++ b/posthog/migrations/0272_alter_organization_plugins_access_level.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0271_delete_promptsequencestate"), ] @@ -15,7 +14,8 @@ class Migration(migrations.Migration): model_name="organization", name="plugins_access_level", field=models.PositiveSmallIntegerField( - choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")], default=3 + choices=[(0, "none"), (3, "config"), (6, "install"), (9, "root")], + default=3, ), ), ] diff --git a/posthog/migrations/0273_mark_inactive_exports_as_finished.py b/posthog/migrations/0273_mark_inactive_exports_as_finished.py index 324a6fdcd2683..fcf024f5e6281 100644 --- a/posthog/migrations/0273_mark_inactive_exports_as_finished.py +++ b/posthog/migrations/0273_mark_inactive_exports_as_finished.py @@ -32,7 +32,12 @@ def should_verify_if_ongoing(start_entry, finished_exports): else: finished_exports.add(key(entry)) - start_entries = list(filter(lambda entry: should_verify_if_ongoing(entry, finished_exports), start_entries)) + start_entries = list( + filter( + lambda entry: should_verify_if_ongoing(entry, finished_exports), + start_entries, + ) + ) for entry in start_entries: expected_running_job_id = entry.detail["trigger"]["job_id"] diff --git a/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py b/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py index ee0b095199239..683937de53ac8 100644 --- a/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py +++ b/posthog/migrations/0274_add_plugin_icon_and_rewrite_urls.py @@ -2,7 +2,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0273_mark_inactive_exports_as_finished"), ] diff --git a/posthog/migrations/0275_feature_flag_rollback_fields.py b/posthog/migrations/0275_feature_flag_rollback_fields.py index 6e938205fcad4..9078a175497a9 100644 --- 
a/posthog/migrations/0275_feature_flag_rollback_fields.py +++ b/posthog/migrations/0275_feature_flag_rollback_fields.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0274_add_plugin_icon_and_rewrite_urls"), ] diff --git a/posthog/migrations/0276_organization_usage.py b/posthog/migrations/0276_organization_usage.py index 1aa80ff6c5cf1..0e46fb7f50e7f 100644 --- a/posthog/migrations/0276_organization_usage.py +++ b/posthog/migrations/0276_organization_usage.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0275_feature_flag_rollback_fields"), ] diff --git a/posthog/migrations/0277_recording_playlist_model.py b/posthog/migrations/0277_recording_playlist_model.py index 137e9c0c75727..3a7b1ea2fd68c 100644 --- a/posthog/migrations/0277_recording_playlist_model.py +++ b/posthog/migrations/0277_recording_playlist_model.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0276_organization_usage"), ] @@ -18,20 +17,44 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingPlaylist", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "short_id", + models.CharField( + blank=True, + default=posthog.utils.generate_short_id, + max_length=12, + ), + ), ("name", models.CharField(blank=True, max_length=400, null=True)), - ("derived_name", models.CharField(blank=True, max_length=400, null=True)), + ( + "derived_name", + models.CharField(blank=True, max_length=400, null=True), + ), ("description", models.TextField(blank=True)), ("pinned", models.BooleanField(default=False)), ("deleted", models.BooleanField(default=False)), ("filters", 
models.JSONField(default=dict)), ("created_at", models.DateTimeField(auto_now_add=True)), - ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -44,7 +67,10 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "unique_together": {("team", "short_id")}, diff --git a/posthog/migrations/0278_organization_customer_id.py b/posthog/migrations/0278_organization_customer_id.py index bebede0dcb936..76e65bf416d02 100644 --- a/posthog/migrations/0278_organization_customer_id.py +++ b/posthog/migrations/0278_organization_customer_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0277_recording_playlist_model"), ] diff --git a/posthog/migrations/0279_recording_playlist_item_model.py b/posthog/migrations/0279_recording_playlist_item_model.py index cc8a359c9cca8..6bf9e98877bcd 100644 --- a/posthog/migrations/0279_recording_playlist_item_model.py +++ b/posthog/migrations/0279_recording_playlist_item_model.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0278_organization_customer_id"), ] @@ -19,7 +18,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="SessionRecordingPlaylistItem", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + 
verbose_name="ID", + ), + ), ("session_id", models.CharField(max_length=200)), ("created_at", models.DateTimeField(auto_now_add=True)), ("deleted", models.BooleanField(blank=True, null=True)), diff --git a/posthog/migrations/0280_fix_async_deletion_team.py b/posthog/migrations/0280_fix_async_deletion_team.py index 98c1b3a81b9a9..9b218d0aad5eb 100644 --- a/posthog/migrations/0280_fix_async_deletion_team.py +++ b/posthog/migrations/0280_fix_async_deletion_team.py @@ -10,7 +10,6 @@ def describe(self): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0279_recording_playlist_item_model"), ] diff --git a/posthog/migrations/0281_create_insight_caching_state_model.py b/posthog/migrations/0281_create_insight_caching_state_model.py index d8fb19f00a79c..9d176840bd954 100644 --- a/posthog/migrations/0281_create_insight_caching_state_model.py +++ b/posthog/migrations/0281_create_insight_caching_state_model.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0280_fix_async_deletion_team"), ] @@ -19,7 +18,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("cache_key", models.CharField(max_length=400)), @@ -41,10 +43,15 @@ class Migration(migrations.Migration): ( "insight", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, related_name="caching_state", to="posthog.insight" + on_delete=django.db.models.deletion.CASCADE, + related_name="caching_state", + to="posthog.insight", ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddIndex( diff --git a/posthog/migrations/0282_fix_insight_caching_state_model.py 
b/posthog/migrations/0282_fix_insight_caching_state_model.py index c8f06becdcf7c..68606d0329dce 100644 --- a/posthog/migrations/0282_fix_insight_caching_state_model.py +++ b/posthog/migrations/0282_fix_insight_caching_state_model.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0281_create_insight_caching_state_model"), ] @@ -35,14 +34,19 @@ class Migration(migrations.Migration): model_name="insightcachingstate", name="dashboard_tile", field=models.ForeignKey( - null=True, on_delete=models.deletion.CASCADE, related_name="caching_states", to="posthog.dashboardtile" + null=True, + on_delete=models.deletion.CASCADE, + related_name="caching_states", + to="posthog.dashboardtile", ), ), migrations.AlterField( model_name="insightcachingstate", name="insight", field=models.ForeignKey( - on_delete=models.deletion.CASCADE, related_name="caching_states", to="posthog.insight" + on_delete=models.deletion.CASCADE, + related_name="caching_states", + to="posthog.insight", ), ), ] diff --git a/posthog/migrations/0283_prompt_sequence_model.py b/posthog/migrations/0283_prompt_sequence_model.py index 49378f62d32dd..16d29c076e483 100644 --- a/posthog/migrations/0283_prompt_sequence_model.py +++ b/posthog/migrations/0283_prompt_sequence_model.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0282_fix_insight_caching_state_model"), ] @@ -17,21 +16,40 @@ class Migration(migrations.Migration): migrations.CreateModel( name="Prompt", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("step", models.IntegerField()), ("type", models.CharField(max_length=200)), ("title", models.CharField(max_length=200)), ("text", models.CharField(max_length=1000)), ("placement", models.CharField(default="top", max_length=200)), ("buttons", 
models.JSONField()), - ("reference", models.CharField(default=None, max_length=200, null=True)), + ( + "reference", + models.CharField(default=None, max_length=200, null=True), + ), ("icon", models.CharField(max_length=200)), ], ), migrations.CreateModel( name="PromptSequence", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("key", models.CharField(max_length=200)), ("type", models.CharField(max_length=200)), ( @@ -45,23 +63,46 @@ class Migration(migrations.Migration): ("status", models.CharField(max_length=200)), ("requires_opt_in", models.BooleanField(default=False)), ("autorun", models.BooleanField(default=True)), - ("must_have_completed", models.ManyToManyField(blank=True, to="posthog.PromptSequence")), + ( + "must_have_completed", + models.ManyToManyField(blank=True, to="posthog.PromptSequence"), + ), ("prompts", models.ManyToManyField(to="posthog.Prompt")), ], ), migrations.CreateModel( name="UserPromptState", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("last_updated_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "last_updated_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ("step", models.IntegerField(default=None, null=True)), ("completed", models.BooleanField(default=False)), ("dismissed", models.BooleanField(default=False)), ( "sequence", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.promptsequence"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.promptsequence", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + 
to=settings.AUTH_USER_MODEL, + ), ), - ("user", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.DeleteModel( diff --git a/posthog/migrations/0285_capture_performance_opt_in.py b/posthog/migrations/0285_capture_performance_opt_in.py index ba1673cef18f2..9f478625b7be2 100644 --- a/posthog/migrations/0285_capture_performance_opt_in.py +++ b/posthog/migrations/0285_capture_performance_opt_in.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0284_improved_caching_state_idx"), ] diff --git a/posthog/migrations/0287_add_session_recording_model.py b/posthog/migrations/0287_add_session_recording_model.py index 28bcd68907815..ca2ecb40a642c 100644 --- a/posthog/migrations/0287_add_session_recording_model.py +++ b/posthog/migrations/0287_add_session_recording_model.py @@ -24,7 +24,10 @@ def migrate_playlist_item_recording_relations(apps, _) -> None: Recording.objects.bulk_create( [ - Recording(session_id=playlist_item_object.session_id, team=playlist_item_object.playlist.team) + Recording( + session_id=playlist_item_object.session_id, + team=playlist_item_object.playlist.team, + ) for playlist_item_object in playlist_items ], ignore_conflicts=True, @@ -44,7 +47,6 @@ def reverse(apps, _) -> None: class Migration(migrations.Migration): - dependencies = [ ("posthog", "0286_index_insightcachingstate_lookup"), ] @@ -72,12 +74,18 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("session_id", models.CharField(max_length=200, unique=True)), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
to="posthog.team"), + ), ], options={ "unique_together": {("team", "session_id")}, diff --git a/posthog/migrations/0288_add_session_recording_persistence.py b/posthog/migrations/0288_add_session_recording_persistence.py index 3bf5226e91025..785346ecc2c10 100644 --- a/posthog/migrations/0288_add_session_recording_persistence.py +++ b/posthog/migrations/0288_add_session_recording_persistence.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0287_add_session_recording_model"), ] diff --git a/posthog/migrations/0289_add_tags_to_feature_flags.py b/posthog/migrations/0289_add_tags_to_feature_flags.py index debf52c006c03..913cbf6c99a36 100644 --- a/posthog/migrations/0289_add_tags_to_feature_flags.py +++ b/posthog/migrations/0289_add_tags_to_feature_flags.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0288_add_session_recording_persistence"), ] @@ -29,7 +28,15 @@ class Migration(migrations.Migration): migrations.AlterUniqueTogether( name="taggeditem", unique_together={ - ("tag", "dashboard", "insight", "event_definition", "property_definition", "action", "feature_flag") + ( + "tag", + "dashboard", + "insight", + "event_definition", + "property_definition", + "action", + "feature_flag", + ) }, ), migrations.AddConstraint( diff --git a/posthog/migrations/0290_add_dashboard_templates.py b/posthog/migrations/0290_add_dashboard_templates.py index 65f8835a89cdd..736a495c8747e 100644 --- a/posthog/migrations/0290_add_dashboard_templates.py +++ b/posthog/migrations/0290_add_dashboard_templates.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0289_add_tags_to_feature_flags"), ] @@ -20,7 +19,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), 
("template_name", models.CharField(max_length=400, null=True)), @@ -30,11 +32,20 @@ class Migration(migrations.Migration): ( "tags", django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=255), default=list, size=None + base_field=models.CharField(max_length=255), + default=list, + size=None, ), ), ("github_url", models.CharField(max_length=8201, null=True)), - ("team", models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", + ), + ), ], ), migrations.AddConstraint( diff --git a/posthog/migrations/0291_create_person_override_model.py b/posthog/migrations/0291_create_person_override_model.py index 59b7b9e5268fb..81c4191a25be3 100644 --- a/posthog/migrations/0291_create_person_override_model.py +++ b/posthog/migrations/0291_create_person_override_model.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0290_add_dashboard_templates"), ] @@ -14,18 +13,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PersonOverride", fields=[ - ("id", models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("old_person_id", models.UUIDField(db_index=True)), ("override_person_id", models.UUIDField(db_index=True)), ("oldest_event", models.DateTimeField()), ("version", models.BigIntegerField(blank=True, null=True)), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( model_name="personoverride", constraint=models.UniqueConstraint( - fields=("team", "old_person_id"), name="unique override per old_person_id" + 
fields=("team", "old_person_id"), + name="unique override per old_person_id", ), ), ] diff --git a/posthog/migrations/0292_property_definitions_persons_and_groups_support.py b/posthog/migrations/0292_property_definitions_persons_and_groups_support.py index cde16d28f1840..d57a95ffa75de 100644 --- a/posthog/migrations/0292_property_definitions_persons_and_groups_support.py +++ b/posthog/migrations/0292_property_definitions_persons_and_groups_support.py @@ -35,7 +35,9 @@ class Migration(migrations.Migration): model_name="propertydefinition", constraint=models.CheckConstraint( check=models.Q( - models.Q(("type", 3), _negated=True), ("group_type_index__isnull", False), _connector="OR" + models.Q(("type", 3), _negated=True), + ("group_type_index__isnull", False), + _connector="OR", ), name="group_type_index_set", ), diff --git a/posthog/migrations/0293_property_definitions_drop_old_constraint.py b/posthog/migrations/0293_property_definitions_drop_old_constraint.py index f1d623521c903..9da2dbdfbe217 100644 --- a/posthog/migrations/0293_property_definitions_drop_old_constraint.py +++ b/posthog/migrations/0293_property_definitions_drop_old_constraint.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0292_property_definitions_persons_and_groups_support"), ] diff --git a/posthog/migrations/0294_plugin_blank_fields.py b/posthog/migrations/0294_plugin_blank_fields.py index 053fbb1d48ac0..7f519b9c021d0 100644 --- a/posthog/migrations/0294_plugin_blank_fields.py +++ b/posthog/migrations/0294_plugin_blank_fields.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0293_property_definitions_drop_old_constraint"), ] diff --git a/posthog/migrations/0295_plugin_allow_blank_config_schema.py b/posthog/migrations/0295_plugin_allow_blank_config_schema.py index 8952f8a252fad..4c8de8d40ef26 100644 --- a/posthog/migrations/0295_plugin_allow_blank_config_schema.py +++ 
b/posthog/migrations/0295_plugin_allow_blank_config_schema.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0294_plugin_blank_fields"), ] diff --git a/posthog/migrations/0296_team_allow_blank_fields.py b/posthog/migrations/0296_team_allow_blank_fields.py index f6c9065580709..9c593b68404ba 100644 --- a/posthog/migrations/0296_team_allow_blank_fields.py +++ b/posthog/migrations/0296_team_allow_blank_fields.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0295_plugin_allow_blank_config_schema"), ] diff --git a/posthog/migrations/0298_add_insight_queries.py b/posthog/migrations/0298_add_insight_queries.py index 48d3475819d91..82c04e7388164 100644 --- a/posthog/migrations/0298_add_insight_queries.py +++ b/posthog/migrations/0298_add_insight_queries.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0297_property_definitions_index_query"), ] diff --git a/posthog/migrations/0299_set_templates_global.py b/posthog/migrations/0299_set_templates_global.py index 4cdaf2f2c645f..7ca39b1814f35 100644 --- a/posthog/migrations/0299_set_templates_global.py +++ b/posthog/migrations/0299_set_templates_global.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0298_add_insight_queries"), ] diff --git a/posthog/migrations/0300_add_constraints_to_person_override.py b/posthog/migrations/0300_add_constraints_to_person_override.py index 91716cd8cc292..1f54ee839514a 100644 --- a/posthog/migrations/0300_add_constraints_to_person_override.py +++ b/posthog/migrations/0300_add_constraints_to_person_override.py @@ -31,7 +31,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0299_set_templates_global"), ] @@ -45,7 +44,11 @@ class Migration(migrations.Migration): model_name="personoverride", constraint=models.CheckConstraint( check=models.Q( - ("old_person_id__exact", 
django.db.models.expressions.F("override_person_id")), _negated=True + ( + "old_person_id__exact", + django.db.models.expressions.F("override_person_id"), + ), + _negated=True, ), name="old_person_id_different_from_override_person_id", ), diff --git a/posthog/migrations/0301_organization_enforce_2fa.py b/posthog/migrations/0301_organization_enforce_2fa.py index 21885de4fd954..43ae649c9d298 100644 --- a/posthog/migrations/0301_organization_enforce_2fa.py +++ b/posthog/migrations/0301_organization_enforce_2fa.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0300_add_constraints_to_person_override"), ] diff --git a/posthog/migrations/0302_add_user_pending_email_and_is_verified.py b/posthog/migrations/0302_add_user_pending_email_and_is_verified.py index e318a48cce19b..1c779dc12df73 100644 --- a/posthog/migrations/0302_add_user_pending_email_and_is_verified.py +++ b/posthog/migrations/0302_add_user_pending_email_and_is_verified.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0301_organization_enforce_2fa"), ] @@ -19,7 +18,10 @@ class Migration(migrations.Migration): model_name="user", name="pending_email", field=models.EmailField( - blank=True, max_length=254, null=True, verbose_name="pending email address awaiting verification" + blank=True, + max_length=254, + null=True, + verbose_name="pending email address awaiting verification", ), ), ] diff --git a/posthog/migrations/0303_team_session_recording_version.py b/posthog/migrations/0303_team_session_recording_version.py index b0517f1506d38..29469557f3b5f 100644 --- a/posthog/migrations/0303_team_session_recording_version.py +++ b/posthog/migrations/0303_team_session_recording_version.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0302_add_user_pending_email_and_is_verified"), ] diff --git a/posthog/migrations/0304_store_dashboard_template_in_db.py 
b/posthog/migrations/0304_store_dashboard_template_in_db.py index 6097f8761a29c..997ce8aab1bb8 100644 --- a/posthog/migrations/0304_store_dashboard_template_in_db.py +++ b/posthog/migrations/0304_store_dashboard_template_in_db.py @@ -13,7 +13,6 @@ def describe(self): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0303_team_session_recording_version"), ] @@ -28,7 +27,10 @@ class Migration(migrations.Migration): model_name="dashboardtemplate", name="created_by", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), migrations.AddField( @@ -66,7 +68,10 @@ class Migration(migrations.Migration): model_name="dashboardtemplate", name="tags", field=django.contrib.postgres.fields.ArrayField( - base_field=models.CharField(max_length=255), blank=True, null=True, size=None + base_field=models.CharField(max_length=255), + blank=True, + null=True, + size=None, ), ), AlterFieldNullSafe( diff --git a/posthog/migrations/0305_rework_person_overrides.py b/posthog/migrations/0305_rework_person_overrides.py index 3afd6d4154b54..e5da39fce39b4 100644 --- a/posthog/migrations/0305_rework_person_overrides.py +++ b/posthog/migrations/0305_rework_person_overrides.py @@ -2,7 +2,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0304_store_dashboard_template_in_db"), ] diff --git a/posthog/migrations/0306_featureflag_dashboard.py b/posthog/migrations/0306_featureflag_dashboard.py index 87d6332e8f08d..b465eda4bee5e 100644 --- a/posthog/migrations/0306_featureflag_dashboard.py +++ b/posthog/migrations/0306_featureflag_dashboard.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0305_rework_person_overrides"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="featureflag", name="usage_dashboard", 
field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard" + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", ), ), ] diff --git a/posthog/migrations/0307_pluginconfig_admin.py b/posthog/migrations/0307_pluginconfig_admin.py index 2716382f4fc62..2a901a1492bb1 100644 --- a/posthog/migrations/0307_pluginconfig_admin.py +++ b/posthog/migrations/0307_pluginconfig_admin.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0306_featureflag_dashboard"), ] diff --git a/posthog/migrations/0308_add_indirect_person_override_constraints.py b/posthog/migrations/0308_add_indirect_person_override_constraints.py index 757cc46f80126..78231ace0834e 100644 --- a/posthog/migrations/0308_add_indirect_person_override_constraints.py +++ b/posthog/migrations/0308_add_indirect_person_override_constraints.py @@ -20,7 +20,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0307_pluginconfig_admin"), ] @@ -29,7 +28,15 @@ class Migration(migrations.Migration): migrations.CreateModel( name="PersonOverrideMapping", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("uuid", models.UUIDField()), ("team_id", models.BigIntegerField()), ], @@ -73,14 +80,19 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="personoverride", constraint=models.UniqueConstraint( - fields=("team", "old_person_id"), name="unique override per old_person_id" + fields=("team", "old_person_id"), + name="unique override per old_person_id", ), ), migrations.AddConstraint( model_name="personoverride", constraint=models.CheckConstraint( check=models.Q( - ("old_person_id__exact", django.db.models.expressions.F("override_person_id")), _negated=True + ( + 
"old_person_id__exact", + django.db.models.expressions.F("override_person_id"), + ), + _negated=True, ), name="old_person_id_different_from_override_person_id", ), diff --git a/posthog/migrations/0309_team_autocapture_opt_out.py b/posthog/migrations/0309_team_autocapture_opt_out.py index 5f77749d65eed..11b8e8a9cb0d7 100644 --- a/posthog/migrations/0309_team_autocapture_opt_out.py +++ b/posthog/migrations/0309_team_autocapture_opt_out.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0308_add_indirect_person_override_constraints"), ] diff --git a/posthog/migrations/0310_add_starter_dashboard_template.py b/posthog/migrations/0310_add_starter_dashboard_template.py index 70957615b1975..2d1fc6972b517 100644 --- a/posthog/migrations/0310_add_starter_dashboard_template.py +++ b/posthog/migrations/0310_add_starter_dashboard_template.py @@ -148,7 +148,6 @@ def create_starter_template(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0309_team_autocapture_opt_out"), ] diff --git a/posthog/migrations/0311_dashboard_template_scope.py b/posthog/migrations/0311_dashboard_template_scope.py index 6843e09d68511..41e34afa83109 100644 --- a/posthog/migrations/0311_dashboard_template_scope.py +++ b/posthog/migrations/0311_dashboard_template_scope.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0310_add_starter_dashboard_template"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="dashboardtemplate", name="scope", field=models.CharField( - choices=[("team", "Only team"), ("global", "Global")], max_length=24, null=True, blank=True + choices=[("team", "Only team"), ("global", "Global")], + max_length=24, + null=True, + blank=True, ), ), migrations.RunSQL( diff --git a/posthog/migrations/0312_organization_available_product_features.py b/posthog/migrations/0312_organization_available_product_features.py index c5b2eb170f9c4..2459cd9726c07 100644 
--- a/posthog/migrations/0312_organization_available_product_features.py +++ b/posthog/migrations/0312_organization_available_product_features.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0311_dashboard_template_scope"), ] diff --git a/posthog/migrations/0313_early_access_feature.py b/posthog/migrations/0313_early_access_feature.py index 20d1dfe22d479..e5bc07942ff24 100644 --- a/posthog/migrations/0313_early_access_feature.py +++ b/posthog/migrations/0313_early_access_feature.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0312_organization_available_product_features"), ] @@ -19,7 +18,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=200)), diff --git a/posthog/migrations/0314_sharingconfiguration_recording.py b/posthog/migrations/0314_sharingconfiguration_recording.py index 940a09db8d46f..d4ce07d2ffebc 100644 --- a/posthog/migrations/0314_sharingconfiguration_recording.py +++ b/posthog/migrations/0314_sharingconfiguration_recording.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0313_early_access_feature"), ] diff --git a/posthog/migrations/0315_notebook.py b/posthog/migrations/0315_notebook.py index b14d66d59c3f7..b02a15842a7a1 100644 --- a/posthog/migrations/0315_notebook.py +++ b/posthog/migrations/0315_notebook.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0314_sharingconfiguration_recording"), ] @@ -21,20 +20,36 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + 
serialize=False, + ), + ), + ( + "short_id", + models.CharField( + blank=True, + default=posthog.utils.generate_short_id, + max_length=12, ), ), - ("short_id", models.CharField(blank=True, default=posthog.utils.generate_short_id, max_length=12)), ("title", models.CharField(blank=True, max_length=256, null=True)), ("content", models.JSONField(blank=True, default=None, null=True)), ("deleted", models.BooleanField(default=False)), ("version", models.IntegerField(default=0)), ("created_at", models.DateTimeField(auto_now_add=True)), - ("last_modified_at", models.DateTimeField(default=django.utils.timezone.now)), + ( + "last_modified_at", + models.DateTimeField(default=django.utils.timezone.now), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -47,7 +62,10 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "unique_together": {("team", "short_id")}, diff --git a/posthog/migrations/0316_action_href_text_matching.py b/posthog/migrations/0316_action_href_text_matching.py index 870c045ad6a34..fdded1fd8cbf0 100644 --- a/posthog/migrations/0316_action_href_text_matching.py +++ b/posthog/migrations/0316_action_href_text_matching.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0315_notebook"), ] @@ -15,7 +14,11 @@ class Migration(migrations.Migration): name="href_matching", field=models.CharField( blank=True, - choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")], + choices=[ + ("contains", "contains"), + ("regex", "regex"), + ("exact", "exact"), + ], max_length=400, null=True, 
), @@ -25,7 +28,11 @@ class Migration(migrations.Migration): name="text_matching", field=models.CharField( blank=True, - choices=[("contains", "contains"), ("regex", "regex"), ("exact", "exact")], + choices=[ + ("contains", "contains"), + ("regex", "regex"), + ("exact", "exact"), + ], max_length=400, null=True, ), diff --git a/posthog/migrations/0317_batch_export_models.py b/posthog/migrations/0317_batch_export_models.py index b8feb24b4b75e..f17bf9293ad34 100644 --- a/posthog/migrations/0317_batch_export_models.py +++ b/posthog/migrations/0317_batch_export_models.py @@ -17,7 +17,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -39,13 +42,15 @@ class Migration(migrations.Migration): ( "created_at", models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportDestination was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExportDestination was created.", ), ), ( "last_updated_at", models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportDestination was last updated." 
+ auto_now=True, + help_text="The timestamp at which this BatchExportDestination was last updated.", ), ), ], @@ -59,7 +64,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -70,7 +78,10 @@ class Migration(migrations.Migration): to="posthog.team", ), ), - ("name", models.TextField(help_text="A human-readable name for this BatchExport.")), + ( + "name", + models.TextField(help_text="A human-readable name for this BatchExport."), + ), ( "destination", models.ForeignKey( @@ -88,21 +99,32 @@ class Migration(migrations.Migration): max_length=64, ), ), - ("paused", models.BooleanField(default=False, help_text="Whether this BatchExport is paused or not.")), + ( + "paused", + models.BooleanField( + default=False, + help_text="Whether this BatchExport is paused or not.", + ), + ), ( "deleted", - models.BooleanField(default=False, help_text="Whether this BatchExport is deleted or not."), + models.BooleanField( + default=False, + help_text="Whether this BatchExport is deleted or not.", + ), ), ( "created_at", models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExport was created." + auto_now_add=True, + help_text="The timestamp at which this BatchExport was created.", ), ), ( "last_updated_at", models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExport was last updated." 
+ auto_now=True, + help_text="The timestamp at which this BatchExport was last updated.", ), ), ], @@ -116,7 +138,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -138,31 +163,52 @@ class Migration(migrations.Migration): ), ( "records_completed", - models.IntegerField(help_text="The number of records that have been exported.", null=True), + models.IntegerField( + help_text="The number of records that have been exported.", + null=True, + ), ), ( "latest_error", - models.TextField(help_text="The latest error that occurred during this run.", null=True), + models.TextField( + help_text="The latest error that occurred during this run.", + null=True, + ), + ), + ( + "data_interval_start", + models.DateTimeField(help_text="The start of the data interval."), + ), + ( + "data_interval_end", + models.DateTimeField(help_text="The end of the data interval."), + ), + ( + "cursor", + models.TextField( + help_text="An opaque cursor that may be used to resume.", + null=True, + ), ), - ("data_interval_start", models.DateTimeField(help_text="The start of the data interval.")), - ("data_interval_end", models.DateTimeField(help_text="The end of the data interval.")), - ("cursor", models.TextField(help_text="An opaque cursor that may be used to resume.", null=True)), ( "created_at", models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportRun was created." 
+ auto_now_add=True, + help_text="The timestamp at which this BatchExportRun was created.", ), ), ( "finished_at", models.DateTimeField( - help_text="The timestamp at which this BatchExportRun finished, successfully or not.", null=True + help_text="The timestamp at which this BatchExportRun finished, successfully or not.", + null=True, ), ), ( "last_updated_at", models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportRun was last updated." + auto_now=True, + help_text="The timestamp at which this BatchExportRun was last updated.", ), ), ( diff --git a/posthog/migrations/0318_alter_earlyaccessfeature_stage.py b/posthog/migrations/0318_alter_earlyaccessfeature_stage.py index 2657a38695868..92abd1afd5d86 100644 --- a/posthog/migrations/0318_alter_earlyaccessfeature_stage.py +++ b/posthog/migrations/0318_alter_earlyaccessfeature_stage.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0317_batch_export_models"), ] diff --git a/posthog/migrations/0319_user_requested_password_reset_at.py b/posthog/migrations/0319_user_requested_password_reset_at.py index 0b51cd0063256..7de6560f71fec 100644 --- a/posthog/migrations/0319_user_requested_password_reset_at.py +++ b/posthog/migrations/0319_user_requested_password_reset_at.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0318_alter_earlyaccessfeature_stage"), ] diff --git a/posthog/migrations/0320_survey.py b/posthog/migrations/0320_survey.py index 9e8fea849ef9b..8dff33ee768db 100644 --- a/posthog/migrations/0320_survey.py +++ b/posthog/migrations/0320_survey.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0319_user_requested_password_reset_at"), ] @@ -19,7 +18,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + 
primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=400)), diff --git a/posthog/migrations/0321_add_exception_autocapture_optin.py b/posthog/migrations/0321_add_exception_autocapture_optin.py index a1adce2374eb6..c15700964f90d 100644 --- a/posthog/migrations/0321_add_exception_autocapture_optin.py +++ b/posthog/migrations/0321_add_exception_autocapture_optin.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0320_survey"), ] diff --git a/posthog/migrations/0322_auto_20230531_1904.py b/posthog/migrations/0322_auto_20230531_1904.py index c7b774f365c67..687d77316a99f 100644 --- a/posthog/migrations/0322_auto_20230531_1904.py +++ b/posthog/migrations/0322_auto_20230531_1904.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0321_add_exception_autocapture_optin"), ] @@ -14,13 +13,30 @@ class Migration(migrations.Migration): migrations.CreateModel( name="FeatureFlagDashboards", fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), ("created_at", models.DateTimeField(auto_now_add=True, null=True)), ("updated_at", models.DateTimeField(auto_now=True, null=True)), - ("dashboard", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard")), + ( + "dashboard", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", + ), + ), ( "feature_flag", - models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.featureflag"), + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="posthog.featureflag", + ), ), ], ), @@ -37,7 +53,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="featureflagdashboards", constraint=models.UniqueConstraint( - fields=("feature_flag", "dashboard"), 
name="unique feature flag for a dashboard" + fields=("feature_flag", "dashboard"), + name="unique feature flag for a dashboard", ), ), ] diff --git a/posthog/migrations/0324_user_has_seen_product_intro_for.py b/posthog/migrations/0324_user_has_seen_product_intro_for.py index 5c75b6a7472e4..6c5142cdf47a6 100644 --- a/posthog/migrations/0324_user_has_seen_product_intro_for.py +++ b/posthog/migrations/0324_user_has_seen_product_intro_for.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0323_alter_batchexportdestination_type"), ] diff --git a/posthog/migrations/0325_alter_dashboardtemplate_scope.py b/posthog/migrations/0325_alter_dashboardtemplate_scope.py index cccfc358af848..d6829a963ac7c 100644 --- a/posthog/migrations/0325_alter_dashboardtemplate_scope.py +++ b/posthog/migrations/0325_alter_dashboardtemplate_scope.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0324_user_has_seen_product_intro_for"), ] @@ -15,7 +14,11 @@ class Migration(migrations.Migration): name="scope", field=models.CharField( blank=True, - choices=[("team", "Only team"), ("global", "Global"), ("feature_flag", "Feature Flag")], + choices=[ + ("team", "Only team"), + ("global", "Global"), + ("feature_flag", "Feature Flag"), + ], max_length=24, null=True, ), diff --git a/posthog/migrations/0326_team_extra_settings.py b/posthog/migrations/0326_team_extra_settings.py index 1ce5ca0886c75..62deb954703eb 100644 --- a/posthog/migrations/0326_team_extra_settings.py +++ b/posthog/migrations/0326_team_extra_settings.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0325_alter_dashboardtemplate_scope"), ] diff --git a/posthog/migrations/0327_alter_earlyaccessfeature_stage.py b/posthog/migrations/0327_alter_earlyaccessfeature_stage.py index c9d2454d40d21..f5376b89e999c 100644 --- a/posthog/migrations/0327_alter_earlyaccessfeature_stage.py +++ 
b/posthog/migrations/0327_alter_earlyaccessfeature_stage.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0326_team_extra_settings"), ] diff --git a/posthog/migrations/0328_add_starter_feature_flag_template.py b/posthog/migrations/0328_add_starter_feature_flag_template.py index adf9d5b971a60..eef2e038dc7a9 100644 --- a/posthog/migrations/0328_add_starter_feature_flag_template.py +++ b/posthog/migrations/0328_add_starter_feature_flag_template.py @@ -59,7 +59,6 @@ def create_starter_template(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("posthog", "0327_alter_earlyaccessfeature_stage"), ] diff --git a/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py b/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py index 7b1e88d018b8d..b3957067826df 100644 --- a/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py +++ b/posthog/migrations/0329_datawarehousecredential_datawarehousetable.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0328_add_starter_feature_flag_template"), ] @@ -21,18 +20,33 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), - ("access_key", encrypted_fields.fields.EncryptedTextField(max_length=500)), - ("access_secret", encrypted_fields.fields.EncryptedTextField(max_length=500)), + ( + "access_key", + encrypted_fields.fields.EncryptedTextField(max_length=500), + ), + ( + "access_secret", + encrypted_fields.fields.EncryptedTextField(max_length=500), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, 
), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "abstract": False, @@ -46,11 +60,17 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("name", models.CharField(max_length=128)), - ("format", models.CharField(choices=[("CSV", "CSV"), ("Parquet", "Parquet")], max_length=128)), + ( + "format", + models.CharField(choices=[("CSV", "CSV"), ("Parquet", "Parquet")], max_length=128), + ), ("url_pattern", models.CharField(max_length=500)), ( "columns", @@ -64,7 +84,10 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( @@ -76,7 +99,10 @@ class Migration(migrations.Migration): to="posthog.datawarehousecredential", ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "abstract": False, diff --git a/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py b/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py index ae830ee034d0d..71671bb4096f1 100644 --- a/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py +++ b/posthog/migrations/0330_add_autocapture_exceptions_events_to_ignore.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0329_datawarehousecredential_datawarehousetable"), ] diff --git 
a/posthog/migrations/0331_add_missing_property_definition_index.py b/posthog/migrations/0331_add_missing_property_definition_index.py index f4c0bcbca4fad..376cec5d0b3d7 100644 --- a/posthog/migrations/0331_add_missing_property_definition_index.py +++ b/posthog/migrations/0331_add_missing_property_definition_index.py @@ -18,6 +18,9 @@ class Migration(migrations.Migration): operations = [ AddIndexConcurrently( model_name="propertydefinition", - index=models.Index(fields=["team_id", "type", "is_numerical"], name="posthog_pro_team_id_eac36d_idx"), + index=models.Index( + fields=["team_id", "type", "is_numerical"], + name="posthog_pro_team_id_eac36d_idx", + ), ), ] diff --git a/posthog/migrations/0332_featureflag_has_enriched_analytics.py b/posthog/migrations/0332_featureflag_has_enriched_analytics.py index 259845b925947..d12ca4079d5aa 100644 --- a/posthog/migrations/0332_featureflag_has_enriched_analytics.py +++ b/posthog/migrations/0332_featureflag_has_enriched_analytics.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0331_add_missing_property_definition_index"), ] diff --git a/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py b/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py index aa9654a3ca275..b945693d75c20 100644 --- a/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py +++ b/posthog/migrations/0333_add_timestamp_fields_to_batch_exports.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0332_featureflag_has_enriched_analytics"), ] @@ -14,21 +13,27 @@ class Migration(migrations.Migration): model_name="batchexport", name="end_at", field=models.DateTimeField( - default=None, help_text="Time after which any Batch Export runs won't be triggered.", null=True + default=None, + help_text="Time after which any Batch Export runs won't be triggered.", + null=True, ), ), migrations.AddField( model_name="batchexport", name="last_paused_at", 
field=models.DateTimeField( - default=None, help_text="The timestamp at which this BatchExport was last paused.", null=True + default=None, + help_text="The timestamp at which this BatchExport was last paused.", + null=True, ), ), migrations.AddField( model_name="batchexport", name="start_at", field=models.DateTimeField( - default=None, help_text="Time before which any Batch Export runs won't be triggered.", null=True + default=None, + help_text="Time before which any Batch Export runs won't be triggered.", + null=True, ), ), ] diff --git a/posthog/migrations/0334_add_asset_ttl.py b/posthog/migrations/0334_add_asset_ttl.py index ec77eedb9cd78..8a97eca2b11e4 100644 --- a/posthog/migrations/0334_add_asset_ttl.py +++ b/posthog/migrations/0334_add_asset_ttl.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0333_add_timestamp_fields_to_batch_exports"), ] diff --git a/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py b/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py index 33f5294d206ef..36d8ddbccc372 100644 --- a/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py +++ b/posthog/migrations/0335_alter_asyncdeletion_deletion_type.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0334_add_asset_ttl"), ] @@ -14,7 +13,13 @@ class Migration(migrations.Migration): model_name="asyncdeletion", name="deletion_type", field=models.PositiveSmallIntegerField( - choices=[(0, "Team"), (1, "Person"), (2, "Group"), (3, "Cohort Stale"), (4, "Cohort Full")] + choices=[ + (0, "Team"), + (1, "Person"), + (2, "Group"), + (3, "Cohort Stale"), + (4, "Cohort Full"), + ] ), ), ] diff --git a/posthog/migrations/0336_alter_survey_type.py b/posthog/migrations/0336_alter_survey_type.py index c432eb88eed46..8fe97122f5db7 100644 --- a/posthog/migrations/0336_alter_survey_type.py +++ b/posthog/migrations/0336_alter_survey_type.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - 
dependencies = [ ("posthog", "0335_alter_asyncdeletion_deletion_type"), ] diff --git a/posthog/migrations/0337_more_session_recording_fields.py b/posthog/migrations/0337_more_session_recording_fields.py index 66e59191c3b6d..c0396ef1417b5 100644 --- a/posthog/migrations/0337_more_session_recording_fields.py +++ b/posthog/migrations/0337_more_session_recording_fields.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0336_alter_survey_type"), ] diff --git a/posthog/migrations/0338_datawarehouse_saved_query.py b/posthog/migrations/0338_datawarehouse_saved_query.py index eac5feef35a87..88ab851d0b1c8 100644 --- a/posthog/migrations/0338_datawarehouse_saved_query.py +++ b/posthog/migrations/0338_datawarehouse_saved_query.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0337_more_session_recording_fields"), ] @@ -22,7 +21,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ( @@ -41,20 +43,30 @@ class Migration(migrations.Migration): null=True, ), ), - ("query", models.JSONField(blank=True, default=dict, help_text="HogQL query", null=True)), + ( + "query", + models.JSONField(blank=True, default=dict, help_text="HogQL query", null=True), + ), ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], ), migrations.AddConstraint( model_name="datawarehousesavedquery", constraint=models.UniqueConstraint( - 
fields=("team", "name"), name="posthog_datawarehouse_saved_query_unique_name" + fields=("team", "name"), + name="posthog_datawarehouse_saved_query_unique_name", ), ), ] diff --git a/posthog/migrations/0339_add_user_scene_personalisation.py b/posthog/migrations/0339_add_user_scene_personalisation.py index d38c1ec1da9a7..aede86b617e5a 100644 --- a/posthog/migrations/0339_add_user_scene_personalisation.py +++ b/posthog/migrations/0339_add_user_scene_personalisation.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0338_datawarehouse_saved_query"), ] @@ -19,20 +18,29 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("scene", models.CharField(max_length=200)), ( "dashboard", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.dashboard" + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.dashboard", ), ), ( "team", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="posthog.team" + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="posthog.team", ), ), ( @@ -50,7 +58,8 @@ class Migration(migrations.Migration): migrations.AddConstraint( model_name="userscenepersonalisation", constraint=models.UniqueConstraint( - fields=("team", "user", "scene"), name="posthog_unique_scene_personalisation" + fields=("team", "user", "scene"), + name="posthog_unique_scene_personalisation", ), ), ] diff --git a/posthog/migrations/0340_action_bytecode.py b/posthog/migrations/0340_action_bytecode.py index c55a3678f0142..3603c83d8ef1a 100644 --- a/posthog/migrations/0340_action_bytecode.py +++ b/posthog/migrations/0340_action_bytecode.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - 
dependencies = [ ("posthog", "0339_add_user_scene_personalisation"), ] diff --git a/posthog/migrations/0341_add_session_recording_storage_version.py b/posthog/migrations/0341_add_session_recording_storage_version.py index 92828fd84168b..6e81b4105e6fc 100644 --- a/posthog/migrations/0341_add_session_recording_storage_version.py +++ b/posthog/migrations/0341_add_session_recording_storage_version.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0340_action_bytecode"), ] diff --git a/posthog/migrations/0342_alter_featureflag_usage_dashboard.py b/posthog/migrations/0342_alter_featureflag_usage_dashboard.py index 942413fd5d49a..55feb73d8a362 100644 --- a/posthog/migrations/0342_alter_featureflag_usage_dashboard.py +++ b/posthog/migrations/0342_alter_featureflag_usage_dashboard.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0341_add_session_recording_storage_version"), ] @@ -15,7 +14,10 @@ class Migration(migrations.Migration): model_name="featureflag", name="usage_dashboard", field=models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to="posthog.dashboard" + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to="posthog.dashboard", ), ), ] diff --git a/posthog/migrations/0343_team_has_completed_onboarding_for.py b/posthog/migrations/0343_team_has_completed_onboarding_for.py index c6fba2fc6334f..e3c1aab1edc01 100644 --- a/posthog/migrations/0343_team_has_completed_onboarding_for.py +++ b/posthog/migrations/0343_team_has_completed_onboarding_for.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0342_alter_featureflag_usage_dashboard"), ] diff --git a/posthog/migrations/0344_add_new_export_type.py b/posthog/migrations/0344_add_new_export_type.py index 49155d32f940b..947c1cb537a40 100644 --- a/posthog/migrations/0344_add_new_export_type.py +++ b/posthog/migrations/0344_add_new_export_type.py 
@@ -13,7 +13,11 @@ class Migration(migrations.Migration): model_name="batchexportdestination", name="type", field=models.CharField( - choices=[("S3", "S3"), ("Snowflake", "Snowflake"), ("Postgres", "Postgres")], + choices=[ + ("S3", "S3"), + ("Snowflake", "Snowflake"), + ("Postgres", "Postgres"), + ], help_text="A choice of supported BatchExportDestination types.", max_length=64, ), diff --git a/posthog/migrations/0345_view_link_and_s3_table_update.py b/posthog/migrations/0345_view_link_and_s3_table_update.py index 0e91d001128e9..5b0cbcc45b68b 100644 --- a/posthog/migrations/0345_view_link_and_s3_table_update.py +++ b/posthog/migrations/0345_view_link_and_s3_table_update.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0344_add_new_export_type"), ] @@ -16,7 +15,12 @@ class Migration(migrations.Migration): migrations.AddField( model_name="datawarehousesavedquery", name="external_tables", - field=models.JSONField(blank=True, default=list, help_text="List of all external tables", null=True), + field=models.JSONField( + blank=True, + default=list, + help_text="List of all external tables", + null=True, + ), ), migrations.CreateModel( name="DataWarehouseViewLink", @@ -26,7 +30,10 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), ("table", models.CharField(max_length=128)), @@ -35,16 +42,23 @@ class Migration(migrations.Migration): ( "created_by", models.ForeignKey( - blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL + blank=True, + null=True, + on_delete=django.db.models.deletion.SET_NULL, + to=settings.AUTH_USER_MODEL, ), ), ( "saved_query", models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, to="posthog.datawarehousesavedquery" + 
on_delete=django.db.models.deletion.CASCADE, + to="posthog.datawarehousesavedquery", ), ), - ("team", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team")), + ( + "team", + models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="posthog.team"), + ), ], options={ "abstract": False, diff --git a/posthog/migrations/0346_team_week_start_day.py b/posthog/migrations/0346_team_week_start_day.py index d8f659ccbefae..716fd071fff04 100644 --- a/posthog/migrations/0346_team_week_start_day.py +++ b/posthog/migrations/0346_team_week_start_day.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0345_view_link_and_s3_table_update"), ] diff --git a/posthog/migrations/0347_add_bigquery_export_type.py b/posthog/migrations/0347_add_bigquery_export_type.py index 862befb04723a..6f9b25a1b38de 100644 --- a/posthog/migrations/0347_add_bigquery_export_type.py +++ b/posthog/migrations/0347_add_bigquery_export_type.py @@ -13,7 +13,12 @@ class Migration(migrations.Migration): model_name="batchexportdestination", name="type", field=models.CharField( - choices=[("S3", "S3"), ("Snowflake", "Snowflake"), ("Postgres", "Postgres"), ("BigQuery", "Bigquery")], + choices=[ + ("S3", "S3"), + ("Snowflake", "Snowflake"), + ("Postgres", "Postgres"), + ("BigQuery", "Bigquery"), + ], help_text="A choice of supported BatchExportDestination types.", max_length=64, ), diff --git a/posthog/migrations/0348_alter_datawarehousetable_format.py b/posthog/migrations/0348_alter_datawarehousetable_format.py index 72434bbc99fdb..d2f464830a957 100644 --- a/posthog/migrations/0348_alter_datawarehousetable_format.py +++ b/posthog/migrations/0348_alter_datawarehousetable_format.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0347_add_bigquery_export_type"), ] @@ -14,7 +13,12 @@ class Migration(migrations.Migration): model_name="datawarehousetable", name="format", field=models.CharField( - choices=[("CSV", 
"CSV"), ("Parquet", "Parquet"), ("JSONEachRow", "JSON")], max_length=128 + choices=[ + ("CSV", "CSV"), + ("Parquet", "Parquet"), + ("JSONEachRow", "JSON"), + ], + max_length=128, ), ), ] diff --git a/posthog/migrations/0349_update_survey_query_name.py b/posthog/migrations/0349_update_survey_query_name.py index cbcbbb3a0c954..13235cdc67fbc 100644 --- a/posthog/migrations/0349_update_survey_query_name.py +++ b/posthog/migrations/0349_update_survey_query_name.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0348_alter_datawarehousetable_format"), ] diff --git a/posthog/migrations/0350_add_notebook_text_content.py b/posthog/migrations/0350_add_notebook_text_content.py index bfe4b079b9945..b2a5c0c14285f 100644 --- a/posthog/migrations/0350_add_notebook_text_content.py +++ b/posthog/migrations/0350_add_notebook_text_content.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0349_update_survey_query_name"), ] diff --git a/posthog/migrations/0351_team_surveys_opt_in.py b/posthog/migrations/0351_team_surveys_opt_in.py index c1722b7a11000..207677ab8b36d 100644 --- a/posthog/migrations/0351_team_surveys_opt_in.py +++ b/posthog/migrations/0351_team_surveys_opt_in.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0350_add_notebook_text_content"), ] diff --git a/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py b/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py index 014edcd509144..3b255ea8a4778 100644 --- a/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py +++ b/posthog/migrations/0353_add_5_minute_interval_to_batch_exports.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0352_auto_20230926_1833"), ] @@ -14,7 +13,12 @@ class Migration(migrations.Migration): model_name="batchexport", name="interval", field=models.CharField( - choices=[("hour", "hour"), ("day", "day"), 
("week", "week"), ("every 5 minutes", "every 5 minutes")], + choices=[ + ("hour", "hour"), + ("day", "day"), + ("week", "week"), + ("every 5 minutes", "every 5 minutes"), + ], default="hour", help_text="The interval at which to export data.", max_length=64, diff --git a/posthog/migrations/0354_organization_never_drop_data.py b/posthog/migrations/0354_organization_never_drop_data.py index 154446df669d5..560f1f518c612 100644 --- a/posthog/migrations/0354_organization_never_drop_data.py +++ b/posthog/migrations/0354_organization_never_drop_data.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0353_add_5_minute_interval_to_batch_exports"), ] diff --git a/posthog/migrations/0355_add_batch_export_backfill_model.py b/posthog/migrations/0355_add_batch_export_backfill_model.py index c558d2a74d7f8..294ad7e019db6 100644 --- a/posthog/migrations/0355_add_batch_export_backfill_model.py +++ b/posthog/migrations/0355_add_batch_export_backfill_model.py @@ -33,11 +33,20 @@ class Migration(migrations.Migration): ( "id", models.UUIDField( - default=posthog.models.utils.UUIDT, editable=False, primary_key=True, serialize=False + default=posthog.models.utils.UUIDT, + editable=False, + primary_key=True, + serialize=False, ), ), - ("start_at", models.DateTimeField(help_text="The start of the data interval.")), - ("end_at", models.DateTimeField(help_text="The end of the data interval.")), + ( + "start_at", + models.DateTimeField(help_text="The start of the data interval."), + ), + ( + "end_at", + models.DateTimeField(help_text="The end of the data interval."), + ), ( "status", models.CharField( @@ -58,7 +67,8 @@ class Migration(migrations.Migration): ( "created_at", models.DateTimeField( - auto_now_add=True, help_text="The timestamp at which this BatchExportBackfill was created." 
+ auto_now_add=True, + help_text="The timestamp at which this BatchExportBackfill was created.", ), ), ( @@ -71,7 +81,8 @@ class Migration(migrations.Migration): ( "last_updated_at", models.DateTimeField( - auto_now=True, help_text="The timestamp at which this BatchExportBackfill was last updated." + auto_now=True, + help_text="The timestamp at which this BatchExportBackfill was last updated.", ), ), ( diff --git a/posthog/migrations/0356_add_replay_cost_control.py b/posthog/migrations/0356_add_replay_cost_control.py index 96c5cb166f4f8..72e04e1d8a46f 100644 --- a/posthog/migrations/0356_add_replay_cost_control.py +++ b/posthog/migrations/0356_add_replay_cost_control.py @@ -6,7 +6,6 @@ class Migration(migrations.Migration): - dependencies = [ ("posthog", "0355_add_batch_export_backfill_model"), ] diff --git a/posthog/models/__init__.py b/posthog/models/__init__.py index 89432e0809984..b5f0586a349bb 100644 --- a/posthog/models/__init__.py +++ b/posthog/models/__init__.py @@ -37,7 +37,13 @@ from .organization_domain import OrganizationDomain from .person import Person, PersonDistinctId, PersonOverride, PersonOverrideMapping from .personal_api_key import PersonalAPIKey -from .plugin import Plugin, PluginAttachment, PluginConfig, PluginSourceFile, PluginLogEntry +from .plugin import ( + Plugin, + PluginAttachment, + PluginConfig, + PluginSourceFile, + PluginLogEntry, +) from .prompt.prompt import Prompt, PromptSequence, UserPromptState from .property import Property from .property_definition import PropertyDefinition @@ -51,8 +57,12 @@ from .user import User, UserManager from .user_scene_personalisation import UserScenePersonalisation from ..session_recordings.models.session_recording import SessionRecording -from ..session_recordings.models.session_recording_playlist import SessionRecordingPlaylist -from ..session_recordings.models.session_recording_playlist_item import SessionRecordingPlaylistItem +from ..session_recordings.models.session_recording_playlist import ( 
+ SessionRecordingPlaylist, +) +from ..session_recordings.models.session_recording_playlist_item import ( + SessionRecordingPlaylistItem, +) __all__ = [ "Action", diff --git a/posthog/models/action/action.py b/posthog/models/action/action.py index 28642eaedeb53..368100fcbc978 100644 --- a/posthog/models/action/action.py +++ b/posthog/models/action/action.py @@ -78,7 +78,10 @@ def refresh_bytecode(self): @receiver(post_save, sender=Action) def action_saved(sender, instance: Action, created, **kwargs): - get_client().publish("reload-action", json.dumps({"teamId": instance.team_id, "actionId": instance.id})) + get_client().publish( + "reload-action", + json.dumps({"teamId": instance.team_id, "actionId": instance.id}), + ) @mutable_receiver(post_delete, sender=Action) diff --git a/posthog/models/action/util.py b/posthog/models/action/util.py index 7a1fcd007f0aa..b67ecf0115dc4 100644 --- a/posthog/models/action/util.py +++ b/posthog/models/action/util.py @@ -35,7 +35,9 @@ def format_action_filter( conditions: List[str] = [] # filter element if step.event == AUTOCAPTURE_EVENT: - from posthog.models.property.util import filter_element # prevent circular import + from posthog.models.property.util import ( + filter_element, + ) # prevent circular import if step.selector: element_condition, element_params = filter_element( diff --git a/posthog/models/action_step.py b/posthog/models/action_step.py index 2f792f1f74d85..036e6fad60d13 100644 --- a/posthog/models/action_step.py +++ b/posthog/models/action_step.py @@ -52,7 +52,8 @@ class ActionStep(models.Model): def action_step_saved(sender, instance: ActionStep, created, **kwargs): instance.action.refresh_bytecode() get_client().publish( - "reload-action", json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}) + "reload-action", + json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}), ) @@ -60,5 +61,6 @@ def action_step_saved(sender, instance: ActionStep, created, **kwargs): def 
action_step_deleted(sender, instance: ActionStep, **kwargs): instance.action.refresh_bytecode() get_client().publish( - "reload-action", json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}) + "reload-action", + json.dumps({"teamId": instance.action.team_id, "actionId": instance.action.id}), ) diff --git a/posthog/models/activity_logging/activity_log.py b/posthog/models/activity_logging/activity_log.py index f3b36e2c3dbd0..94a9c0914faf6 100644 --- a/posthog/models/activity_logging/activity_log.py +++ b/posthog/models/activity_logging/activity_log.py @@ -99,8 +99,23 @@ class Meta: field_exclusions: Dict[ActivityScope, List[str]] = { - "Notebook": ["id", "last_modified_at", "last_modified_by", "created_at", "created_by", "text_content"], - "FeatureFlag": ["id", "created_at", "created_by", "is_simple_flag", "experiment", "team", "featureflagoverride"], + "Notebook": [ + "id", + "last_modified_at", + "last_modified_by", + "created_at", + "created_by", + "text_content", + ], + "FeatureFlag": [ + "id", + "created_at", + "created_by", + "is_simple_flag", + "experiment", + "team", + "featureflagoverride", + ], "Person": [ "id", "uuid", @@ -143,7 +158,14 @@ class Meta: "dashboardtile", "caching_states", ], - "SessionRecordingPlaylist": ["id", "short_id", "created_at", "created_by", "last_modified_at", "last_modified_by"], + "SessionRecordingPlaylist": [ + "id", + "short_id", + "created_at", + "created_by", + "last_modified_at", + "last_modified_by", + ], "EventDefinition": [ "eventdefinition_ptr_id", "id", @@ -246,13 +268,24 @@ def changes_between( elif right is None and left is not None: changes.append(Change(type=model_type, field=field, action="deleted", before=left)) elif left != right: - changes.append(Change(type=model_type, field=field, action="changed", before=left, after=right)) + changes.append( + Change( + type=model_type, + field=field, + action="changed", + before=left, + after=right, + ) + ) return changes def dict_changes_between( - 
model_type: ActivityScope, previous: Dict[Any, Any], new: Dict[Any, Any], use_field_exclusions: bool = False + model_type: ActivityScope, + previous: Dict[Any, Any], + new: Dict[Any, Any], + use_field_exclusions: bool = False, ) -> List[Change]: """ Identifies changes between two dictionaries by comparing fields @@ -276,10 +309,23 @@ def dict_changes_between( if previous_value is None and new_value is not None: changes.append(Change(type=model_type, field=field, action="created", after=new_value)) elif new_value is None and previous_value is not None: - changes.append(Change(type=model_type, field=field, action="deleted", before=previous_value)) + changes.append( + Change( + type=model_type, + field=field, + action="deleted", + before=previous_value, + ) + ) elif previous_value != new_value: changes.append( - Change(type=model_type, field=field, action="changed", before=previous_value, after=new_value) + Change( + type=model_type, + field=field, + action="changed", + before=previous_value, + after=new_value, + ) ) return changes @@ -350,7 +396,11 @@ def get_activity_page(activity_query: models.QuerySet, limit: int = 10, page: in def load_activity( - scope: ActivityScope, team_id: int, item_id: Optional[int] = None, limit: int = 10, page: int = 1 + scope: ActivityScope, + team_id: int, + item_id: Optional[int] = None, + limit: int = 10, + page: int = 1, ) -> ActivityPage: # TODO in follow-up to posthog #8931 selecting specific fields into a return type from this query diff --git a/posthog/models/app_metrics/sql.py b/posthog/models/app_metrics/sql.py index 3198b3226e52b..65d1de6de3060 100644 --- a/posthog/models/app_metrics/sql.py +++ b/posthog/models/app_metrics/sql.py @@ -1,7 +1,11 @@ from django.conf import settings from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, kafka_engine -from posthog.clickhouse.table_engines import AggregatingMergeTree, Distributed, ReplicationScheme +from posthog.clickhouse.table_engines import ( + 
AggregatingMergeTree, + Distributed, + ReplicationScheme, +) from posthog.kafka_client.topics import KAFKA_APP_METRICS SHARDED_APP_METRICS_TABLE_ENGINE = lambda: AggregatingMergeTree( diff --git a/posthog/models/async_deletion/async_deletion.py b/posthog/models/async_deletion/async_deletion.py index 513657f73daae..a851fa513f526 100644 --- a/posthog/models/async_deletion/async_deletion.py +++ b/posthog/models/async_deletion/async_deletion.py @@ -21,7 +21,8 @@ class Meta: condition=models.Q(group_type_index__isnull=True), ), models.UniqueConstraint( - name="unique deletion for groups", fields=["deletion_type", "key", "group_type_index"] + name="unique deletion for groups", + fields=["deletion_type", "key", "group_type_index"], ), ] indexes = [models.Index(name="delete_verified_at index", fields=["delete_verified_at"])] diff --git a/posthog/models/async_deletion/delete.py b/posthog/models/async_deletion/delete.py index 378d655a02714..7774cf2384016 100644 --- a/posthog/models/async_deletion/delete.py +++ b/posthog/models/async_deletion/delete.py @@ -41,7 +41,10 @@ def mark_deletions_done(self): AsyncDeletion.objects.filter(pk__in=[row.pk for row in to_verify]).update(delete_verified_at=timezone.now()) logger.warn( "Updated `delete_verified_at` for AsyncDeletion", - {"count": len(to_verify), "team_ids": list(set(row.team_id for row in to_verify))}, + { + "count": len(to_verify), + "team_ids": list(set(row.team_id for row in to_verify)), + }, ) def _fetch_unverified_deletions_grouped(self): diff --git a/posthog/models/async_deletion/delete_cohorts.py b/posthog/models/async_deletion/delete_cohorts.py index a71f16ae26cd0..3a4737c221964 100644 --- a/posthog/models/async_deletion/delete_cohorts.py +++ b/posthog/models/async_deletion/delete_cohorts.py @@ -15,7 +15,10 @@ def process(self, deletions: List[AsyncDeletion]): logger.warn( "Starting AsyncDeletion on `cohortpeople` table in ClickHouse", - {"count": len(deletions), "team_ids": list(set(row.team_id for row in 
deletions))}, + { + "count": len(deletions), + "team_ids": list(set(row.team_id for row in deletions)), + }, ) conditions, args = self._conditions(deletions) @@ -62,13 +65,20 @@ def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, D version_param = f"version{suffix}" if async_deletion.deletion_type == DeletionType.Cohort_full: key, _ = async_deletion.key.split("_") - return f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s )", { - team_id_param: async_deletion.team_id, - key_param: key, - } + return ( + f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s )", + { + team_id_param: async_deletion.team_id, + key_param: key, + }, + ) else: key, version = async_deletion.key.split("_") return ( f"( team_id = %({team_id_param})s AND {self._column_name(async_deletion)} = %({key_param})s AND version < %({version_param})s )", - {team_id_param: async_deletion.team_id, version_param: version, key_param: key}, + { + team_id_param: async_deletion.team_id, + version_param: version, + key_param: key, + }, ) diff --git a/posthog/models/async_deletion/delete_events.py b/posthog/models/async_deletion/delete_events.py index 5529ca8a95843..cef9c97688f85 100644 --- a/posthog/models/async_deletion/delete_events.py +++ b/posthog/models/async_deletion/delete_events.py @@ -27,7 +27,10 @@ def process(self, deletions: List[AsyncDeletion]): logger.info( "Starting AsyncDeletion on `events` table in ClickHouse", - {"count": len(deletions), "team_ids": list(set(row.team_id for row in deletions))}, + { + "count": len(deletions), + "team_ids": list(set(row.team_id for row in deletions)), + }, ) conditions, args = self._conditions(deletions) @@ -48,7 +51,10 @@ def process(self, deletions: List[AsyncDeletion]): logger.info( "Starting AsyncDeletion for teams on other tables", - {"count": len(team_deletions), "team_ids": list(set(row.team_id for row in deletions))}, + { + "count": 
len(team_deletions), + "team_ids": list(set(row.team_id for row in deletions)), + }, ) conditions, args = self._conditions(team_deletions) for table in TABLES_TO_DELETE_TEAM_DATA_FROM: @@ -97,5 +103,8 @@ def _condition(self, async_deletion: AsyncDeletion, suffix: str) -> Tuple[str, D else: return ( f"(team_id = %(team_id{suffix})s AND {self._column_name(async_deletion)} = %(key{suffix})s)", - {f"team_id{suffix}": async_deletion.team_id, f"key{suffix}": async_deletion.key}, + { + f"team_id{suffix}": async_deletion.team_id, + f"key{suffix}": async_deletion.key, + }, ) diff --git a/posthog/models/async_migration.py b/posthog/models/async_migration.py index 885f7ce397931..ab60eed94d0c5 100644 --- a/posthog/models/async_migration.py +++ b/posthog/models/async_migration.py @@ -33,7 +33,7 @@ class Meta: null=False, blank=False, default=MigrationStatus.NotStarted ) - current_operation_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + current_operation_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField( null=False, blank=False, default=0 ) current_query_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="") diff --git a/posthog/models/cohort/cohort.py b/posthog/models/cohort/cohort.py index b101a0fa68bf6..b907df41c934a 100644 --- a/posthog/models/cohort/cohort.py +++ b/posthog/models/cohort/cohort.py @@ -193,7 +193,12 @@ def calculate_people_ch(self, pending_version): from posthog.models.cohort.util import recalculate_cohortpeople from posthog.tasks.calculate_cohort import clear_stale_cohort - logger.warn("cohort_calculation_started", id=self.pk, current_version=self.version, new_version=pending_version) + logger.warn( + "cohort_calculation_started", + id=self.pk, + current_version=self.version, + new_version=pending_version, + ) start_time = time.monotonic() try: @@ -237,7 +242,10 @@ def insert_users_by_list(self, items: List[str]) -> None: """ batchsize = 1000 - from 
posthog.models.cohort.util import insert_static_cohort, get_static_cohort_size + from posthog.models.cohort.util import ( + insert_static_cohort, + get_static_cohort_size, + ) if TEST: from posthog.test.base import flush_persons_and_events @@ -251,15 +259,26 @@ def insert_users_by_list(self, items: List[str]) -> None: batch = items[i : i + batchsize] persons_query = ( Person.objects.filter(team_id=self.team_id) - .filter(Q(persondistinctid__team_id=self.team_id, persondistinctid__distinct_id__in=batch)) + .filter( + Q( + persondistinctid__team_id=self.team_id, + persondistinctid__distinct_id__in=batch, + ) + ) .exclude(cohort__id=self.id) ) - insert_static_cohort([p for p in persons_query.values_list("uuid", flat=True)], self.pk, self.team) + insert_static_cohort( + [p for p in persons_query.values_list("uuid", flat=True)], + self.pk, + self.team, + ) sql, params = persons_query.distinct("pk").only("pk").query.sql_with_params() query = UPDATE_QUERY.format( cohort_id=self.pk, values_query=sql.replace( - 'FROM "posthog_person"', f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', 1 + 'FROM "posthog_person"', + f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', + 1, ), ) cursor.execute(query, params) @@ -294,7 +313,9 @@ def insert_users_list_by_uuid(self, items: List[str]) -> None: query = UPDATE_QUERY.format( cohort_id=self.pk, values_query=sql.replace( - 'FROM "posthog_person"', f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', 1 + 'FROM "posthog_person"', + f', {self.pk}, {self.version or "NULL"} FROM "posthog_person"', + 1, ), ) cursor.execute(query, params) diff --git a/posthog/models/cohort/sql.py b/posthog/models/cohort/sql.py index b73662931aeb2..821e84e29fd37 100644 --- a/posthog/models/cohort/sql.py +++ b/posthog/models/cohort/sql.py @@ -19,7 +19,9 @@ Order By (team_id, cohort_id, person_id, version) {storage_policy} """.format( - cluster=CLICKHOUSE_CLUSTER, engine=COHORTPEOPLE_TABLE_ENGINE(), storage_policy="" + 
cluster=CLICKHOUSE_CLUSTER, + engine=COHORTPEOPLE_TABLE_ENGINE(), + storage_policy="", ) TRUNCATE_COHORTPEOPLE_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS cohortpeople ON CLUSTER '{CLICKHOUSE_CLUSTER}'" diff --git a/posthog/models/cohort/test/test_util.py b/posthog/models/cohort/test/test_util.py index 7db7d6a5e0130..d8ff051a0bb41 100644 --- a/posthog/models/cohort/test/test_util.py +++ b/posthog/models/cohort/test/test_util.py @@ -1,5 +1,8 @@ from posthog.models.cohort import Cohort -from posthog.models.cohort.util import get_dependent_cohorts, simplified_cohort_filter_properties +from posthog.models.cohort.util import ( + get_dependent_cohorts, + simplified_cohort_filter_properties, +) from posthog.test.base import BaseTest, _create_person, flush_persons_and_events @@ -14,8 +17,11 @@ def _create_cohort(**kwargs): class TestCohortUtils(BaseTest): def test_simplified_cohort_filter_properties_static_cohort(self): - - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = _create_cohort(team=self.team, name="cohort1", groups=[], is_static=True) flush_persons_and_events() cohort.insert_users_by_list(["p1"]) @@ -24,12 +30,25 @@ def test_simplified_cohort_filter_properties_static_cohort(self): self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": False, "type": "static-cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": False, + "type": "static-cohort", + "value": cohort.pk, + } + ], + }, ) def test_simplified_cohort_filter_properties_static_cohort_with_negation(self): - - _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "test", "name": "test"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"name": "test", "name": "test"}, + ) cohort = 
_create_cohort(team=self.team, name="cohort1", groups=[], is_static=True) flush_persons_and_events() cohort.insert_users_by_list(["p1"]) @@ -38,7 +57,17 @@ def test_simplified_cohort_filter_properties_static_cohort_with_negation(self): self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": True, "type": "static-cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": True, + "type": "static-cohort", + "value": cohort.pk, + } + ], + }, ) def test_simplified_cohort_filter_properties_precalculated_cohort(self): @@ -57,7 +86,14 @@ def test_simplified_cohort_filter_properties_precalculated_cohort(self): result.to_dict(), { "type": "AND", - "values": [{"key": "id", "negation": False, "type": "precalculated-cohort", "value": cohort.pk}], + "values": [ + { + "key": "id", + "negation": False, + "type": "precalculated-cohort", + "value": cohort.pk, + } + ], }, ) @@ -77,7 +113,14 @@ def test_simplified_cohort_filter_properties_precalculated_cohort_negated(self): result.to_dict(), { "type": "AND", - "values": [{"key": "id", "negation": True, "type": "precalculated-cohort", "value": cohort.pk}], + "values": [ + { + "key": "id", + "negation": True, + "type": "precalculated-cohort", + "value": cohort.pk, + } + ], }, ) @@ -113,7 +156,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_behav self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": False, "type": "cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": False, + "type": "cohort", + "value": cohort.pk, + } + ], + }, ) # with negation @@ -122,7 +175,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_behav self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": 
True, + "type": "cohort", + "value": cohort.pk, + } + ], + }, ) def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohort_filter(self): @@ -139,7 +202,12 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor "type": "AND", "values": [ {"key": "name", "value": "test", "type": "person"}, - {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True}, + { + "key": "id", + "value": cohort1.pk, + "type": "cohort", + "negation": True, + }, ], } }, @@ -154,11 +222,19 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "name", "value": "test", "type": "person"}]}, + { + "type": "AND", + "values": [{"key": "name", "value": "test", "type": "person"}], + }, { "type": "AND", "values": [ - {"key": "id", "value": cohort1.pk, "type": "cohort", "negation": True}, + { + "key": "id", + "value": cohort1.pk, + "type": "cohort", + "negation": True, + }, ], }, ], @@ -171,7 +247,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_cohor self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": True, + "type": "cohort", + "value": cohort.pk, + } + ], + }, ) def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_person_property_filters(self): @@ -182,7 +268,10 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_ "properties": { "type": "OR", "values": [ - {"type": "AND", "values": [{"key": "name", "value": "test", "type": "person"}]}, + { + "type": "AND", + "values": [{"key": "name", "value": "test", "type": "person"}], + }, { "type": "OR", "values": [ @@ -204,7 +293,10 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_ { "type": "OR", "values": [ - {"type": "AND", 
"values": [{"key": "name", "value": "test", "type": "person"}]}, + { + "type": "AND", + "values": [{"key": "name", "value": "test", "type": "person"}], + }, { "type": "OR", "values": [ @@ -222,7 +314,17 @@ def test_simplified_cohort_filter_properties_non_precalculated_cohort_with_only_ self.assertEqual( result.to_dict(), - {"type": "AND", "values": [{"key": "id", "negation": True, "type": "cohort", "value": cohort.pk}]}, + { + "type": "AND", + "values": [ + { + "key": "id", + "negation": True, + "type": "cohort", + "value": cohort.pk, + } + ], + }, ) @@ -268,7 +370,18 @@ def test_dependent_cohorts_for_deeply_nested_cohort(self): cohort3 = _create_cohort( team=self.team, name="cohort3", - groups=[{"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}]}], + groups=[ + { + "properties": [ + { + "key": "id", + "value": cohort2.pk, + "type": "cohort", + "negation": True, + } + ] + } + ], ) self.assertEqual(get_dependent_cohorts(cohort1), []) @@ -291,7 +404,18 @@ def test_dependent_cohorts_for_circular_nested_cohort(self): cohort3 = _create_cohort( team=self.team, name="cohort1", - groups=[{"properties": [{"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}]}], + groups=[ + { + "properties": [ + { + "key": "id", + "value": cohort2.pk, + "type": "cohort", + "negation": True, + } + ] + } + ], ) cohort1.groups = [{"properties": [{"key": "id", "value": cohort3.pk, "type": "cohort"}]}] @@ -328,7 +452,12 @@ def test_dependent_cohorts_for_complex_nested_cohort(self): { "properties": [ {"key": "name", "value": "test3", "type": "person"}, - {"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}, + { + "key": "id", + "value": cohort2.pk, + "type": "cohort", + "negation": True, + }, ] } ], @@ -337,7 +466,18 @@ def test_dependent_cohorts_for_complex_nested_cohort(self): cohort4 = _create_cohort( team=self.team, name="cohort1", - groups=[{"properties": [{"key": "id", "value": cohort1.pk, "type": "cohort", "negation": 
True}]}], + groups=[ + { + "properties": [ + { + "key": "id", + "value": cohort1.pk, + "type": "cohort", + "negation": True, + } + ] + } + ], ) cohort5 = _create_cohort( @@ -346,8 +486,18 @@ def test_dependent_cohorts_for_complex_nested_cohort(self): groups=[ { "properties": [ - {"key": "id", "value": cohort2.pk, "type": "cohort", "negation": True}, - {"key": "id", "value": cohort4.pk, "type": "cohort", "negation": True}, + { + "key": "id", + "value": cohort2.pk, + "type": "cohort", + "negation": True, + }, + { + "key": "id", + "value": cohort4.pk, + "type": "cohort", + "negation": True, + }, ] } ], diff --git a/posthog/models/cohort/util.py b/posthog/models/cohort/util.py index c5b8c39ec4f1d..800b937d51f15 100644 --- a/posthog/models/cohort/util.py +++ b/posthog/models/cohort/util.py @@ -52,7 +52,11 @@ def format_person_query(cohort: Cohort, index: int, hogql_context: HogQLContext) from posthog.queries.cohort_query import CohortQuery query_builder = CohortQuery( - Filter(data={"properties": cohort.properties}, team=cohort.team, hogql_context=hogql_context), + Filter( + data={"properties": cohort.properties}, + team=cohort.team, + hogql_context=hogql_context, + ), cohort.team, cohort_pk=cohort.pk, ) @@ -72,7 +76,13 @@ def format_static_cohort_query(cohort: Cohort, index: int, prepend: str) -> Tupl def format_precalculated_cohort_query(cohort: Cohort, index: int, prepend: str = "") -> Tuple[str, Dict[str, Any]]: filter_query = GET_PERSON_ID_BY_PRECALCULATED_COHORT_ID.format(index=index, prepend=prepend) - return (filter_query, {f"{prepend}_cohort_id_{index}": cohort.pk, f"{prepend}_version_{index}": cohort.version}) + return ( + filter_query, + { + f"{prepend}_cohort_id_{index}": cohort.pk, + f"{prepend}_version_{index}": cohort.version, + }, + ) def get_count_operator(count_operator: Optional[str]) -> str: @@ -102,7 +112,10 @@ def get_entity_query( elif action_id: action = Action.objects.get(pk=action_id, team_id=team_id) action_filter_query, action_params = 
format_action_filter( - team_id=team_id, action=action, prepend="_{}_action".format(group_idx), hogql_context=hogql_context + team_id=team_id, + action=action, + prepend="_{}_action".format(group_idx), + hogql_context=hogql_context, ) return action_filter_query, action_params else: @@ -128,7 +141,10 @@ def parse_entity_timestamps_in_days(days: int) -> Tuple[str, Dict[str, str]]: return ( "AND timestamp >= %(date_from)s AND timestamp <= %(date_to)s", - {"date_from": start_time.strftime("%Y-%m-%d %H:%M:%S"), "date_to": curr_time.strftime("%Y-%m-%d %H:%M:%S")}, + { + "date_from": start_time.strftime("%Y-%m-%d %H:%M:%S"), + "date_to": curr_time.strftime("%Y-%m-%d %H:%M:%S"), + }, ) @@ -142,7 +158,10 @@ def parse_cohort_timestamps(start_time: Optional[str], end_time: Optional[str]) params = {"date_from": datetime.strptime(start_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S")} if end_time: clause += "timestamp <= %(date_to)s" - params = {**params, "date_to": datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S")} + params = { + **params, + "date_to": datetime.strptime(end_time, "%Y-%m-%dT%H:%M:%S").strftime("%Y-%m-%d %H:%M:%S"), + } return clause, params @@ -177,7 +196,10 @@ def format_filter_query( def format_cohort_subquery( - cohort: Cohort, index: int, hogql_context: HogQLContext, custom_match_field="person_id" + cohort: Cohort, + index: int, + hogql_context: HogQLContext, + custom_match_field="person_id", ) -> Tuple[str, Dict[str, Any]]: is_precalculated = is_precalculated_query(cohort) if is_precalculated: @@ -189,7 +211,12 @@ def format_cohort_subquery( return person_query, params -def get_person_ids_by_cohort_id(team: Team, cohort_id: int, limit: Optional[int] = None, offset: Optional[int] = None): +def get_person_ids_by_cohort_id( + team: Team, + cohort_id: int, + limit: Optional[int] = None, + offset: Optional[int] = None, +): from posthog.models.property.util import parse_prop_grouped_clauses filter = Filter(data={"properties": 
[{"key": "id", "value": cohort_id, "type": "cohort"}]}) @@ -254,7 +281,10 @@ def recalculate_cohortpeople(cohort: Cohort, pending_version: int) -> Optional[i if before_count: logger.warn( - "Recalculating cohortpeople starting", team_id=cohort.team_id, cohort_id=cohort.pk, size_before=before_count + "Recalculating cohortpeople starting", + team_id=cohort.team_id, + cohort_id=cohort.pk, + size_before=before_count, ) recalcluate_cohortpeople_sql = RECALCULATE_COHORT_BY_ID.format(cohort_filter=cohort_query) @@ -289,7 +319,11 @@ def clear_stale_cohortpeople(cohort: Cohort, before_version: int) -> None: if cohort.version and cohort.version > 0: stale_count_result = sync_execute( STALE_COHORTPEOPLE, - {"cohort_id": cohort.pk, "team_id": cohort.team_id, "version": before_version}, + { + "cohort_id": cohort.pk, + "team_id": cohort.team_id, + "version": before_version, + }, ) if stale_count_result and len(stale_count_result) and len(stale_count_result[0]): @@ -333,7 +367,14 @@ def simplified_cohort_filter_properties(cohort: Cohort, team: Team, is_negated=F if is_precalculated_query(cohort): return PropertyGroup( type=PropertyOperatorType.AND, - values=[Property(type="precalculated-cohort", key="id", value=cohort.pk, negation=is_negated)], + values=[ + Property( + type="precalculated-cohort", + key="id", + value=cohort.pk, + negation=is_negated, + ) + ], ) # Cohort can have multiple match groups. 
@@ -356,7 +397,14 @@ def simplified_cohort_filter_properties(cohort: Cohort, team: Team, is_negated=F if is_negated: return PropertyGroup( type=PropertyOperatorType.AND, - values=[Property(type="cohort", key="id", value=cohort.pk, negation=is_negated)], + values=[ + Property( + type="cohort", + key="id", + value=cohort.pk, + negation=is_negated, + ) + ], ) # :TRICKY: We need to ensure we don't have infinite loops in here # guaranteed during cohort creation @@ -390,7 +438,9 @@ def get_all_cohort_ids_by_person_uuid(uuid: str, team_id: int) -> List[int]: def get_dependent_cohorts( - cohort: Cohort, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None + cohort: Cohort, + using_database: str = "default", + seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, ) -> List[Cohort]: if seen_cohorts_cache is None: seen_cohorts_cache = {} diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py index d2a477d97e0f8..f20fc9fdcb0f2 100644 --- a/posthog/models/dashboard.py +++ b/posthog/models/dashboard.py @@ -18,14 +18,23 @@ class Dashboard(models.Model): class CreationMode(models.TextChoices): DEFAULT = "default", "Default" - TEMPLATE = "template", "Template" # dashboard was created from a predefined template - DUPLICATE = "duplicate", "Duplicate" # dashboard was duplicated from another dashboard + TEMPLATE = ( + "template", + "Template", + ) # dashboard was created from a predefined template + DUPLICATE = ( + "duplicate", + "Duplicate", + ) # dashboard was duplicated from another dashboard class RestrictionLevel(models.IntegerChoices): """Collaboration restriction level (which is a dashboard setting). 
Sync with PrivilegeLevel.""" EVERYONE_IN_PROJECT_CAN_EDIT = 21, "Everyone in the project can edit" - ONLY_COLLABORATORS_CAN_EDIT = 37, "Only those invited to this dashboard can edit" + ONLY_COLLABORATORS_CAN_EDIT = ( + 37, + "Only those invited to this dashboard can edit", + ) class PrivilegeLevel(models.IntegerChoices): """Collaboration privilege level (which is a user property). Sync with RestrictionLevel.""" @@ -43,15 +52,25 @@ class PrivilegeLevel(models.IntegerChoices): last_accessed_at: models.DateTimeField = models.DateTimeField(blank=True, null=True) filters: models.JSONField = models.JSONField(default=dict) creation_mode: models.CharField = models.CharField(max_length=16, default="default", choices=CreationMode.choices) - restriction_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( - default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, choices=RestrictionLevel.choices + restriction_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField( + default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT, + choices=RestrictionLevel.choices, + ) + insights = models.ManyToManyField( + "posthog.Insight", + related_name="dashboards", + through="DashboardTile", + blank=True, ) - insights = models.ManyToManyField("posthog.Insight", related_name="dashboards", through="DashboardTile", blank=True) # Deprecated in favour of app-wide tagging model. 
See EnterpriseTaggedItem deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list) deprecated_tags_v2: ArrayField = ArrayField( - models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags" + models.CharField(max_length=32), + null=True, + blank=True, + default=None, + db_column="tags", ) # DEPRECATED: using the new "sharing" relation instead diff --git a/posthog/models/dashboard_tile.py b/posthog/models/dashboard_tile.py index ed4a885bfc1c1..7cc6b2601cb62 100644 --- a/posthog/models/dashboard_tile.py +++ b/posthog/models/dashboard_tile.py @@ -16,7 +16,11 @@ class Text(models.Model): created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) last_modified_by: models.ForeignKey = models.ForeignKey( - "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_text_tiles" + "User", + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="modified_text_tiles", ) team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE) @@ -33,8 +37,18 @@ class DashboardTile(models.Model): # Relations dashboard = models.ForeignKey("posthog.Dashboard", on_delete=models.CASCADE, related_name="tiles") - insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, related_name="dashboard_tiles", null=True) - text = models.ForeignKey("posthog.Text", on_delete=models.CASCADE, related_name="dashboard_tiles", null=True) + insight = models.ForeignKey( + "posthog.Insight", + on_delete=models.CASCADE, + related_name="dashboard_tiles", + null=True, + ) + text = models.ForeignKey( + "posthog.Text", + on_delete=models.CASCADE, + related_name="dashboard_tiles", + null=True, + ) # Tile layout and style layouts: models.JSONField = models.JSONField(default=dict) @@ -57,9 +71,14 @@ class Meta: condition=Q(("insight__isnull", 
False)), ), UniqueConstraint( - fields=["dashboard", "text"], name=f"unique_dashboard_text", condition=Q(("text__isnull", False)) + fields=["dashboard", "text"], + name=f"unique_dashboard_text", + condition=Q(("text__isnull", False)), + ), + models.CheckConstraint( + check=build_check(("insight", "text")), + name="dash_tile_exactly_one_related_object", ), - models.CheckConstraint(check=build_check(("insight", "text")), name="dash_tile_exactly_one_related_object"), ] @property @@ -94,7 +113,11 @@ def save(self, *args, **kwargs) -> None: def copy_to_dashboard(self, dashboard: Dashboard) -> None: DashboardTile.objects.create( - dashboard=dashboard, insight=self.insight, text=self.text, color=self.color, layouts=self.layouts + dashboard=dashboard, + insight=self.insight, + text=self.text, + color=self.color, + layouts=self.layouts, ) @staticmethod diff --git a/posthog/models/early_access_feature.py b/posthog/models/early_access_feature.py index e73c463b96a09..3ec1c99543b9a 100644 --- a/posthog/models/early_access_feature.py +++ b/posthog/models/early_access_feature.py @@ -12,7 +12,10 @@ class Stage(models.TextChoices): ARCHIVED = "archived", "archived" team: models.ForeignKey = models.ForeignKey( - "posthog.Team", on_delete=models.CASCADE, related_name="features", related_query_name="feature" + "posthog.Team", + on_delete=models.CASCADE, + related_name="features", + related_query_name="feature", ) feature_flag: models.ForeignKey = models.ForeignKey( "posthog.FeatureFlag", diff --git a/posthog/models/element_group.py b/posthog/models/element_group.py index d248ba9d25739..3d399f2559844 100644 --- a/posthog/models/element_group.py +++ b/posthog/models/element_group.py @@ -30,7 +30,8 @@ def create(self, *args: Any, **kwargs: Any): group = super().create(*args, **kwargs) except: return ElementGroup.objects.get( - hash=kwargs["hash"], team_id=kwargs["team"].pk if kwargs.get("team") else kwargs["team_id"] + hash=kwargs["hash"], + team_id=kwargs["team"].pk if 
kwargs.get("team") else kwargs["team_id"], ) for element in elements: element.group = group diff --git a/posthog/models/entity/entity.py b/posthog/models/entity/entity.py index 8f62e5ea98aba..aced3a18a8842 100644 --- a/posthog/models/entity/entity.py +++ b/posthog/models/entity/entity.py @@ -65,7 +65,10 @@ class Entity(PropertyMixin): def __init__(self, data: Dict[str, Any]) -> None: self.id = data.get("id") - if data.get("type") not in [TREND_FILTER_TYPE_ACTIONS, TREND_FILTER_TYPE_EVENTS]: + if data.get("type") not in [ + TREND_FILTER_TYPE_ACTIONS, + TREND_FILTER_TYPE_EVENTS, + ]: raise ValueError("Type needs to be either TREND_FILTER_TYPE_ACTIONS or TREND_FILTER_TYPE_EVENTS") self.type = data["type"] order_provided = data.get("order") @@ -150,7 +153,15 @@ def get_action(self) -> Action: raise ValidationError(f"Action ID {self.id} does not exist!") __repr__ = sane_repr( - "id", "type", "order", "name", "custom_name", "math", "math_property", "math_hogql", "properties" + "id", + "type", + "order", + "name", + "custom_name", + "math", + "math_property", + "math_hogql", + "properties", ) diff --git a/posthog/models/event/query_event_list.py b/posthog/models/event/query_event_list.py index 527bfb62645ea..de70d511da156 100644 --- a/posthog/models/event/query_event_list.py +++ b/posthog/models/event/query_event_list.py @@ -87,7 +87,10 @@ def query_events_list( tzinfo=team.timezone_info, ) prop_filters, prop_filter_params = parse_prop_grouped_clauses( - team_id=team.pk, property_group=filter.property_groups, has_person_id_joined=False, hogql_context=hogql_context + team_id=team.pk, + property_group=filter.property_groups, + has_person_id_joined=False, + hogql_context=hogql_context, ) if action_id: @@ -106,7 +109,10 @@ def query_events_list( if prop_filters != "": return insight_query_with_columns( SELECT_EVENT_BY_TEAM_AND_CONDITIONS_FILTERS_SQL.format( - conditions=conditions, limit=limit_sql, filters=prop_filters, order=order + conditions=conditions, + limit=limit_sql, 
+ filters=prop_filters, + order=order, ), { "team_id": team.pk, diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py index 7deb2ee87b291..2cd36b34e1dd2 100644 --- a/posthog/models/event/util.py +++ b/posthog/models/event/util.py @@ -12,7 +12,11 @@ from posthog.kafka_client.client import ClickhouseProducer from posthog.kafka_client.topics import KAFKA_EVENTS_JSON from posthog.models import Group -from posthog.models.element.element import Element, chain_to_elements, elements_to_string +from posthog.models.element.element import ( + Element, + chain_to_elements, + elements_to_string, +) from posthog.models.event.sql import BULK_INSERT_EVENT_SQL, INSERT_EVENT_SQL from posthog.models.person import Person from posthog.models.team import Team @@ -167,7 +171,8 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di else: try: person = Person.objects.get( - persondistinctid__distinct_id=event["distinct_id"], persondistinctid__team_id=team_id + persondistinctid__distinct_id=event["distinct_id"], + persondistinctid__team_id=team_id, ) person_properties = person.properties person_id = person.uuid @@ -179,7 +184,10 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di event = { **event, - "person_properties": {**person_properties, **event.get("person_properties", {})}, + "person_properties": { + **person_properties, + **event.get("person_properties", {}), + }, "person_id": person_id, "person_created_at": person_created_at, } @@ -189,13 +197,20 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di if property_key.startswith("$group_"): group_type_index = property_key[-1] try: - group = Group.objects.get(team_id=team_id, group_type_index=group_type_index, group_key=value) + group = Group.objects.get( + team_id=team_id, + group_type_index=group_type_index, + group_key=value, + ) group_property_key = f"group{group_type_index}_properties" group_created_at_key = 
f"group{group_type_index}_created_at" event = { **event, - group_property_key: {**group.group_properties, **event.get(group_property_key, {})}, + group_property_key: { + **group.group_properties, + **event.get(group_property_key, {}), + }, group_created_at_key: event.get(group_created_at_key, datetime64_default_timestamp), } @@ -238,7 +253,10 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di else datetime64_default_timestamp, } - params = {**params, **{"{}_{}".format(key, index): value for key, value in event.items()}} + params = { + **params, + **{"{}_{}".format(key, index): value for key, value in event.items()}, + } sync_execute(BULK_INSERT_EVENT_SQL() + ", ".join(inserts), params, flush=False) diff --git a/posthog/models/event_definition.py b/posthog/models/event_definition.py index 7e2aa00d48819..5b22a9e6a2869 100644 --- a/posthog/models/event_definition.py +++ b/posthog/models/event_definition.py @@ -8,7 +8,10 @@ class EventDefinition(UUIDModel): team: models.ForeignKey = models.ForeignKey( - Team, on_delete=models.CASCADE, related_name="event_definitions", related_query_name="team" + Team, + on_delete=models.CASCADE, + related_name="event_definitions", + related_query_name="team", ) name: models.CharField = models.CharField(max_length=400) created_at: models.DateTimeField = models.DateTimeField(default=timezone.now, null=True) @@ -26,7 +29,9 @@ class Meta: unique_together = ("team", "name") indexes = [ GinIndex( - name="index_event_definition_name", fields=["name"], opclasses=["gin_trgm_ops"] + name="index_event_definition_name", + fields=["name"], + opclasses=["gin_trgm_ops"], ) # To speed up DB-based fuzzy searching ] diff --git a/posthog/models/event_property.py b/posthog/models/event_property.py index 458567c376ab4..4824248ddfce2 100644 --- a/posthog/models/event_property.py +++ b/posthog/models/event_property.py @@ -12,9 +12,13 @@ class EventProperty(models.Model): class Meta: constraints = [ models.UniqueConstraint( - 
fields=["team", "event", "property"], name="posthog_event_property_unique_team_event_property" + fields=["team", "event", "property"], + name="posthog_event_property_unique_team_event_property", ) ] - indexes = [models.Index(fields=["team", "event"]), models.Index(fields=["team", "property"])] + indexes = [ + models.Index(fields=["team", "event"]), + models.Index(fields=["team", "property"]), + ] __repr__ = sane_repr("event", "property", "team_id") diff --git a/posthog/models/exported_asset.py b/posthog/models/exported_asset.py index eb3bf961c9aaa..675245e867634 100644 --- a/posthog/models/exported_asset.py +++ b/posthog/models/exported_asset.py @@ -96,7 +96,11 @@ def file_ext(self): return self.export_format.split("/")[1] def get_analytics_metadata(self): - return {"export_format": self.export_format, "dashboard_id": self.dashboard_id, "insight_id": self.insight_id} + return { + "export_format": self.export_format, + "dashboard_id": self.dashboard_id, + "insight_id": self.insight_id, + } def get_public_content_url(self, expiry_delta: Optional[timedelta] = None): token = get_public_access_token(self, expiry_delta) @@ -112,7 +116,11 @@ def delete_expired_assets(cls): def get_public_access_token(asset: ExportedAsset, expiry_delta: Optional[timedelta] = None) -> str: if not expiry_delta: expiry_delta = timedelta(days=PUBLIC_ACCESS_TOKEN_EXP_DAYS) - return encode_jwt({"id": asset.id}, expiry_delta=expiry_delta, audience=PosthogJwtAudience.EXPORTED_ASSET) + return encode_jwt( + {"id": asset.id}, + expiry_delta=expiry_delta, + audience=PosthogJwtAudience.EXPORTED_ASSET, + ) def asset_for_token(token: str) -> ExportedAsset: @@ -153,7 +161,10 @@ def save_content(exported_asset: ExportedAsset, content: bytes) -> None: except ObjectStorageError as ose: capture_exception(ose) logger.error( - "exported_asset.object-storage-error", exported_asset_id=exported_asset.id, exception=ose, exc_info=True + "exported_asset.object-storage-error", + exported_asset_id=exported_asset.id, + 
exception=ose, + exc_info=True, ) save_content_to_exported_asset(exported_asset, content) diff --git a/posthog/models/feature_flag/feature_flag.py b/posthog/models/feature_flag/feature_flag.py index 29d8e89296e49..b45271bb16845 100644 --- a/posthog/models/feature_flag/feature_flag.py +++ b/posthog/models/feature_flag/feature_flag.py @@ -8,7 +8,10 @@ from django.utils import timezone from sentry_sdk.api import capture_exception -from posthog.constants import ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER, PropertyOperatorType +from posthog.constants import ( + ENRICHED_DASHBOARD_INSIGHT_IDENTIFIER, + PropertyOperatorType, +) from posthog.models.cohort import Cohort from posthog.models.experiment import Experiment from posthog.models.property import GroupTypeIndex @@ -120,12 +123,17 @@ def get_filters(self): # We don't want to migrate to avoid /decide endpoint downtime until this code has been deployed return { "groups": [ - {"properties": self.filters.get("properties", []), "rollout_percentage": self.rollout_percentage} + { + "properties": self.filters.get("properties", []), + "rollout_percentage": self.rollout_percentage, + } ], } def transform_cohort_filters_for_easy_evaluation( - self, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None + self, + using_database: str = "default", + seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, ): """ Expands cohort filters into person property filters when possible. 
@@ -248,7 +256,9 @@ def transform_cohort_filters_for_easy_evaluation( return parsed_conditions def get_cohort_ids( - self, using_database: str = "default", seen_cohorts_cache: Optional[Dict[str, Cohort]] = None + self, + using_database: str = "default", + seen_cohorts_cache: Optional[Dict[str, Cohort]] = None, ) -> List[int]: from posthog.models.cohort.util import get_dependent_cohorts @@ -274,7 +284,9 @@ def get_cohort_ids( [ dependent_cohort.pk for dependent_cohort in get_dependent_cohorts( - cohort, using_database=using_database, seen_cohorts_cache=seen_cohorts_cache + cohort, + using_database=using_database, + seen_cohorts_cache=seen_cohorts_cache, ) ] ) @@ -310,7 +322,8 @@ class FeatureFlagHashKeyOverride(models.Model): class Meta: constraints = [ models.UniqueConstraint( - fields=["team", "person", "feature_flag_key"], name="Unique hash_key for a user/team/feature_flag combo" + fields=["team", "person", "feature_flag_key"], + name="Unique hash_key for a user/team/feature_flag combo", ) ] @@ -329,7 +342,8 @@ class FeatureFlagOverride(models.Model): class Meta: constraints = [ models.UniqueConstraint( - fields=["user", "feature_flag", "team"], name="unique feature flag for a user/team combo" + fields=["user", "feature_flag", "team"], + name="unique feature flag for a user/team combo", ) ] @@ -340,7 +354,9 @@ class Meta: def set_feature_flags_for_team_in_cache( - team_id: int, feature_flags: Optional[List[FeatureFlag]] = None, using_database: str = "default" + team_id: int, + feature_flags: Optional[List[FeatureFlag]] = None, + using_database: str = "default", ) -> List[FeatureFlag]: from posthog.api.feature_flag import MinimalFeatureFlagSerializer @@ -391,5 +407,8 @@ class FeatureFlagDashboards(models.Model): class Meta: constraints = [ - models.UniqueConstraint(fields=["feature_flag", "dashboard"], name="unique feature flag for a dashboard") + models.UniqueConstraint( + fields=["feature_flag", "dashboard"], + name="unique feature flag for a dashboard", + ) ] 
diff --git a/posthog/models/feature_flag/flag_analytics.py b/posthog/models/feature_flag/flag_analytics.py index e949de479d166..367c836f75882 100644 --- a/posthog/models/feature_flag/flag_analytics.py +++ b/posthog/models/feature_flag/flag_analytics.py @@ -1,6 +1,8 @@ from typing import TYPE_CHECKING, Tuple from posthog.constants import FlagRequestType -from posthog.helpers.dashboard_templates import add_enriched_insights_to_feature_flag_dashboard +from posthog.helpers.dashboard_templates import ( + add_enriched_insights_to_feature_flag_dashboard, +) from posthog.models.feature_flag.feature_flag import FeatureFlag from posthog.redis import redis, get_client import time @@ -68,9 +70,11 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str with client.lock(f"{REDIS_LOCK_TOKEN}:{team_id}", timeout=60, blocking=False): decide_key_name = get_team_request_key(team_id, FlagRequestType.DECIDE) - total_decide_request_count, min_time, max_time = _extract_total_count_for_key_from_redis_hash( - client, decide_key_name - ) + ( + total_decide_request_count, + min_time, + max_time, + ) = _extract_total_count_for_key_from_redis_hash(client, decide_key_name) if total_decide_request_count > 0 and settings.DECIDE_BILLING_ANALYTICS_TOKEN: ph_client.capture( @@ -87,9 +91,11 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str ) local_evaluation_key_name = get_team_request_key(team_id, FlagRequestType.LOCAL_EVALUATION) - total_local_evaluation_request_count, min_time, max_time = _extract_total_count_for_key_from_redis_hash( - client, local_evaluation_key_name - ) + ( + total_local_evaluation_request_count, + min_time, + max_time, + ) = _extract_total_count_for_key_from_redis_hash(client, local_evaluation_key_name) if total_local_evaluation_request_count > 0 and settings.DECIDE_BILLING_ANALYTICS_TOKEN: ph_client.capture( @@ -113,7 +119,6 @@ def capture_team_decide_usage(ph_client: "Posthog", team_id: int, team_uuid: str def 
find_flags_with_enriched_analytics(begin: datetime, end: datetime): - result = sync_execute( """ SELECT team_id, JSONExtractString(properties, 'feature_flag') as flag_key diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py index 059b60d7211f8..05c5bbccb8f63 100644 --- a/posthog/models/feature_flag/flag_matching.py +++ b/posthog/models/feature_flag/flag_matching.py @@ -25,7 +25,10 @@ from posthog.models.cohort import Cohort from posthog.models.utils import execute_with_timeout from posthog.queries.base import match_property, properties_to_Q -from posthog.database_healthcheck import postgres_healthcheck, DATABASE_FOR_FLAG_MATCHING +from posthog.database_healthcheck import ( + postgres_healthcheck, + DATABASE_FOR_FLAG_MATCHING, +) from posthog.utils import label_for_team_id_to_track from .feature_flag import ( @@ -156,7 +159,11 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch: # Match for boolean super condition first if feature_flag.filters.get("super_groups", None): - is_match, super_condition_value, evaluation_reason = self.is_super_condition_match(feature_flag) + ( + is_match, + super_condition_value, + evaluation_reason, + ) = self.is_super_condition_match(feature_flag) if is_match: payload = self.get_matching_payload(super_condition_value, None, feature_flag) return FeatureFlagMatch( @@ -184,11 +191,18 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch: payload = self.get_matching_payload(is_match, variant, feature_flag) return FeatureFlagMatch( - match=True, variant=variant, reason=evaluation_reason, condition_index=index, payload=payload + match=True, + variant=variant, + reason=evaluation_reason, + condition_index=index, + payload=payload, ) - highest_priority_evaluation_reason, highest_priority_index = self.get_highest_priority_match_evaluation( - highest_priority_evaluation_reason, highest_priority_index, evaluation_reason, index + 
(highest_priority_evaluation_reason, highest_priority_index,) = self.get_highest_priority_match_evaluation( + highest_priority_evaluation_reason, + highest_priority_index, + evaluation_reason, + index, ) return FeatureFlagMatch( @@ -227,7 +241,12 @@ def get_matches(self) -> Tuple[Dict[str, Union[str, bool]], Dict[str, dict], Dic faced_error_computing_flags = True handle_feature_flag_exception(err, "[Feature Flags] Error computing flags") - return flag_values, flag_evaluation_reasons, flag_payloads, faced_error_computing_flags + return ( + flag_values, + flag_evaluation_reasons, + flag_payloads, + faced_error_computing_flags, + ) def get_matching_variant(self, feature_flag: FeatureFlag) -> Optional[str]: for variant in self.variant_lookup_table(feature_flag): @@ -259,7 +278,11 @@ def is_super_condition_match(self, feature_flag: FeatureFlag) -> Tuple[bool, boo super_condition_value = self._super_condition_matches(feature_flag) if super_condition_value_is_set: - return True, super_condition_value, FeatureFlagMatchReason.SUPER_CONDITION_VALUE + return ( + True, + super_condition_value, + FeatureFlagMatchReason.SUPER_CONDITION_VALUE, + ) # Evaluate if properties are empty if feature_flag.super_conditions and len(feature_flag.super_conditions) > 0: @@ -290,7 +313,8 @@ def is_condition_match( target_properties = self.property_value_overrides if feature_flag.aggregation_group_type_index is not None: target_properties = self.group_property_value_overrides.get( - self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], {} + self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], + {}, ) condition_match = all(match_property(property, target_properties) for property in properties) else: @@ -344,7 +368,9 @@ def query_conditions(self) -> Dict[str, bool]: all_conditions: Dict = {} team_id = self.feature_flags[0].team_id person_query: QuerySet = Person.objects.using(DATABASE_FOR_FLAG_MATCHING).filter( - team_id=team_id, 
persondistinctid__distinct_id=self.distinct_id, persondistinctid__team_id=team_id + team_id=team_id, + persondistinctid__distinct_id=self.distinct_id, + persondistinctid__team_id=team_id, ) basic_group_query: QuerySet = Group.objects.using(DATABASE_FOR_FLAG_MATCHING).filter(team_id=team_id) group_query_per_group_type_mapping: Dict[GroupTypeIndex, Tuple[QuerySet, List[str]]] = {} @@ -372,7 +398,8 @@ def condition_eval(key, condition): target_properties = self.property_value_overrides if feature_flag.aggregation_group_type_index is not None: target_properties = self.group_property_value_overrides.get( - self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], {} + self.cache.group_type_index_to_name[feature_flag.aggregation_group_type_index], + {}, ) expr = properties_to_Q( Filter(data=condition).property_groups.flat, @@ -401,7 +428,8 @@ def condition_eval(key, condition): person_query = person_query.annotate( **{ key: ExpressionWrapper( - expr if expr else RawSQL("true", []), output_field=BooleanField() + expr if expr else RawSQL("true", []), + output_field=BooleanField(), ) } ) @@ -410,13 +438,15 @@ def condition_eval(key, condition): if feature_flag.aggregation_group_type_index not in group_query_per_group_type_mapping: # ignore flags that didn't have the right groups passed in return - group_query, group_fields = group_query_per_group_type_mapping[ - feature_flag.aggregation_group_type_index - ] + ( + group_query, + group_fields, + ) = group_query_per_group_type_mapping[feature_flag.aggregation_group_type_index] group_query = group_query.annotate( **{ key: ExpressionWrapper( - expr if expr else RawSQL("true", []), output_field=BooleanField() + expr if expr else RawSQL("true", []), + output_field=BooleanField(), ) } ) @@ -468,7 +498,10 @@ def condition_eval(key, condition): if len(person_query) > 0: all_conditions = {**all_conditions, **person_query[0]} - for group_query, group_fields in group_query_per_group_type_mapping.values(): + for ( + 
group_query, + group_fields, + ) in group_query_per_group_type_mapping.values(): group_query = group_query.values(*group_fields) if len(group_query) > 0: assert len(group_query) == 1, f"Expected 1 group query result, got {len(group_query)}" @@ -514,7 +547,9 @@ def get_hash(self, feature_flag: FeatureFlag, salt="") -> float: return hash_val / __LONG_SCALE__ def can_compute_locally( - self, properties: List[Property], group_type_index: Optional[GroupTypeIndex] = None + self, + properties: List[Property], + group_type_index: Optional[GroupTypeIndex] = None, ) -> bool: target_properties = self.property_value_overrides if group_type_index is not None: @@ -661,7 +696,10 @@ def get_all_feature_flags( SELECT key FROM posthog_featureflag WHERE team_id = %(team_id)s AND ensure_experience_continuity = TRUE AND active = TRUE AND deleted = FALSE AND key NOT IN (SELECT feature_flag_key FROM existing_overrides) """ - cursor.execute(query, {"team_id": team_id, "distinct_ids": tuple(distinct_ids)}) # type: ignore + cursor.execute( + query, + {"team_id": team_id, "distinct_ids": tuple(distinct_ids)}, # type: ignore + ) flags_with_no_overrides = [row[0] for row in cursor.fetchall()] should_write_hash_key_override = len(flags_with_no_overrides) > 0 except Exception as e: @@ -686,7 +724,8 @@ def get_all_feature_flags( ) team_id_label = label_for_team_id_to_track(team_id) FLAG_HASH_KEY_WRITES_COUNTER.labels( - team_id=team_id_label, successful_write=writing_hash_key_override + team_id=team_id_label, + successful_write=writing_hash_key_override, ).inc() except Exception as e: # If the database is in read-only mode, we can't handle experience continuity flags, @@ -695,7 +734,9 @@ def get_all_feature_flags( # For this case, and for any other case, do not error out on decide, just continue assuming continuity couldn't happen. # At the same time, don't set db down, because the read-replica might still be up. 
handle_feature_flag_exception( - e, "[Feature Flags] Error while setting feature flag hash key overrides", set_healthcheck=False + e, + "[Feature Flags] Error while setting feature flag hash key overrides", + set_healthcheck=False, ) # This is the read-path for experience continuity. We need to get the overrides, and to do that, we get the person_id. @@ -783,14 +824,24 @@ def set_feature_flag_hash_key_overrides(team_id: int, distinct_ids: List[str], h # We don't want to return an error response for `/decide` just because of this. # There can be cases where it's a different override (like a person on two different browser sending the same request at the same time), # but we don't care about that case because first override wins. - cursor.execute(query, {"team_id": team_id, "distinct_ids": tuple(distinct_ids), "hash_key_override": hash_key_override}) # type: ignore + cursor.execute( + query, + { + "team_id": team_id, + "distinct_ids": tuple(distinct_ids), # type: ignore + "hash_key_override": hash_key_override, + }, + ) return cursor.rowcount > 0 except IntegrityError as e: if "violates foreign key constraint" in str(e) and retry < max_retries - 1: # This can happen if a person is deleted while we're trying to add overrides for it. # This is the only case when we retry. 
- logger.info("Retrying set_feature_flag_hash_key_overrides due to person deletion", exc_info=True) + logger.info( + "Retrying set_feature_flag_hash_key_overrides due to person deletion", + exc_info=True, + ) time.sleep(retry_delay) else: raise e diff --git a/posthog/models/feature_flag/permissions.py b/posthog/models/feature_flag/permissions.py index 3df6cc1fe16b4..95d39636c4c07 100644 --- a/posthog/models/feature_flag/permissions.py +++ b/posthog/models/feature_flag/permissions.py @@ -22,7 +22,8 @@ def can_user_edit_feature_flag(request, feature_flag): all_role_memberships = request.user.role_memberships.select_related("role").all() try: feature_flag_resource_access = OrganizationResourceAccess.objects.get( - organization=request.user.organization, resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS + organization=request.user.organization, + resource=OrganizationResourceAccess.Resources.FEATURE_FLAGS, ) if feature_flag_resource_access.access_level >= OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT: return True @@ -30,7 +31,10 @@ def can_user_edit_feature_flag(request, feature_flag): except OrganizationResourceAccess.DoesNotExist: org_level = OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT - role_level = max([membership.role.feature_flags_access_level for membership in all_role_memberships], default=0) + role_level = max( + [membership.role.feature_flags_access_level for membership in all_role_memberships], + default=0, + ) if role_level == 0: final_level = org_level diff --git a/posthog/models/feature_flag/user_blast_radius.py b/posthog/models/feature_flag/user_blast_radius.py index 317c12e8a18ac..5843e3513e6b1 100644 --- a/posthog/models/feature_flag/user_blast_radius.py +++ b/posthog/models/feature_flag/user_blast_radius.py @@ -9,15 +9,17 @@ from posthog.models.team.team import Team -def get_user_blast_radius(team: Team, feature_flag_condition: dict, group_type_index: Optional[GroupTypeIndex] = None): - +def get_user_blast_radius( + team: 
Team, + feature_flag_condition: dict, + group_type_index: Optional[GroupTypeIndex] = None, +): from posthog.queries.person_query import PersonQuery # No rollout % calculations here, since it makes more sense to compute that on the frontend properties = feature_flag_condition.get("properties") or [] if group_type_index is not None: - try: from ee.clickhouse.queries.groups_join_query import GroupsJoinQuery except Exception: diff --git a/posthog/models/feedback/survey.py b/posthog/models/feedback/survey.py index 13938222d317e..b8a91b0b92527 100644 --- a/posthog/models/feedback/survey.py +++ b/posthog/models/feedback/survey.py @@ -16,7 +16,10 @@ class Meta: constraints = [models.UniqueConstraint(fields=["team", "name"], name="unique survey name for team")] team: models.ForeignKey = models.ForeignKey( - "posthog.Team", on_delete=models.CASCADE, related_name="surveys", related_query_name="survey" + "posthog.Team", + on_delete=models.CASCADE, + related_name="surveys", + related_query_name="survey", ) name: models.CharField = models.CharField(max_length=400) description: models.TextField = models.TextField(blank=True) @@ -57,7 +60,12 @@ class Meta: @mutable_receiver([post_save, post_delete], sender=Survey) def update_surveys_opt_in(sender, instance, **kwargs): active_surveys_count = ( - Survey.objects.filter(team_id=instance.team_id, start_date__isnull=False, end_date__isnull=True, archived=False) + Survey.objects.filter( + team_id=instance.team_id, + start_date__isnull=False, + end_date__isnull=True, + archived=False, + ) .exclude(type="api") .count() ) diff --git a/posthog/models/filters/base_filter.py b/posthog/models/filters/base_filter.py index 193c71f574b50..8b86de9b23129 100644 --- a/posthog/models/filters/base_filter.py +++ b/posthog/models/filters/base_filter.py @@ -40,7 +40,12 @@ def __init__( elif request.data and request.data.get(PROPERTIES): properties = request.data[PROPERTIES] - data = {**request.GET.dict(), **request.data, **(data if data else {}), 
**({PROPERTIES: properties})} + data = { + **request.GET.dict(), + **request.data, + **(data if data else {}), + **({PROPERTIES: properties}), + } elif data is None: raise ValueError("You need to define either a data dict or a request") @@ -50,7 +55,8 @@ def __init__( # Set the HogQL context for the request self.hogql_context = self.kwargs.get( - "hogql_context", HogQLContext(within_non_hogql_query=True, team_id=self.team.pk if self.team else None) + "hogql_context", + HogQLContext(within_non_hogql_query=True, team_id=self.team.pk if self.team else None), ) if self.team: self.hogql_context.person_on_events_mode = self.team.person_on_events_mode @@ -77,7 +83,8 @@ def toJSON(self): def shallow_clone(self, overrides: Dict[str, Any]): "Clone the filter's data while sharing the HogQL context" return type(self)( - data={**self._data, **overrides}, **{**self.kwargs, "team": self.team, "hogql_context": self.hogql_context} + data={**self._data, **overrides}, + **{**self.kwargs, "team": self.team, "hogql_context": self.hogql_context}, ) def query_tags(self) -> Dict[str, Any]: diff --git a/posthog/models/filters/mixins/common.py b/posthog/models/filters/mixins/common.py index bbb727407c6be..ae50d71f30656 100644 --- a/posthog/models/filters/mixins/common.py +++ b/posthog/models/filters/mixins/common.py @@ -50,7 +50,12 @@ ) from posthog.models.entity import Entity, ExclusionEntity, MathType from posthog.models.filters.mixins.base import BaseParamMixin, BreakdownType -from posthog.models.filters.mixins.utils import cached_property, include_dict, include_query_tags, process_bool +from posthog.models.filters.mixins.utils import ( + cached_property, + include_dict, + include_query_tags, + process_bool, +) from posthog.models.filters.utils import GroupTypeIndex, validate_group_type_index from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse_with_delta_mapping @@ -239,7 +244,10 @@ def breakdown_group_type_index(self) -> Optional[GroupTypeIndex]: @include_dict def 
breakdown_type_and_group_to_dict(self): if self.breakdown_type == "group": - return {BREAKDOWN_TYPE: self.breakdown_type, BREAKDOWN_GROUP_TYPE_INDEX: self.breakdown_group_type_index} + return { + BREAKDOWN_TYPE: self.breakdown_type, + BREAKDOWN_GROUP_TYPE_INDEX: self.breakdown_group_type_index, + } elif self.breakdown_type: return {BREAKDOWN_TYPE: self.breakdown_type} else: @@ -343,7 +351,11 @@ def date_from(self) -> Optional[datetime.datetime]: if self._date_from == "all": return None elif isinstance(self._date_from, str): - date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_from, self.team.timezone_info, always_truncate=True) # type: ignore + date, delta_mapping = relative_date_parse_with_delta_mapping( + self._date_from, + self.team.timezone_info, # type: ignore + always_truncate=True, + ) self.date_from_delta_mapping = delta_mapping return date else: @@ -361,7 +373,11 @@ def date_to(self) -> datetime.datetime: if isinstance(self._date_to, str): try: return datetime.datetime.strptime(self._date_to, "%Y-%m-%d").replace( - hour=23, minute=59, second=59, microsecond=999999, tzinfo=ZoneInfo("UTC") + hour=23, + minute=59, + second=59, + microsecond=999999, + tzinfo=ZoneInfo("UTC"), ) except ValueError: try: @@ -369,7 +385,11 @@ def date_to(self) -> datetime.datetime: tzinfo=ZoneInfo("UTC") ) except ValueError: - date, delta_mapping = relative_date_parse_with_delta_mapping(self._date_to, self.team.timezone_info, always_truncate=True) # type: ignore + date, delta_mapping = relative_date_parse_with_delta_mapping( + self._date_to, + self.team.timezone_info, # type: ignore + always_truncate=True, + ) self.date_to_delta_mapping = delta_mapping return date else: diff --git a/posthog/models/filters/mixins/funnel.py b/posthog/models/filters/mixins/funnel.py index 4c13029ec0fb6..91312a5030478 100644 --- a/posthog/models/filters/mixins/funnel.py +++ b/posthog/models/filters/mixins/funnel.py @@ -264,7 +264,11 @@ class 
FunnelTrendsPersonsMixin(BaseParamMixin): @cached_property def entrance_period_start(self) -> Optional[datetime.datetime]: entrance_period_start_raw = self._data.get(ENTRANCE_PERIOD_START) - return relative_date_parse(entrance_period_start_raw, self.team.timezone_info) if entrance_period_start_raw else None # type: ignore + return ( + relative_date_parse(entrance_period_start_raw, self.team.timezone_info) # type: ignore + if entrance_period_start_raw + else None + ) @cached_property def drop_off(self) -> Optional[bool]: diff --git a/posthog/models/filters/mixins/property.py b/posthog/models/filters/mixins/property.py index 5812967c035e2..7ca409d4897d1 100644 --- a/posthog/models/filters/mixins/property.py +++ b/posthog/models/filters/mixins/property.py @@ -5,7 +5,11 @@ from posthog.constants import PROPERTIES, PropertyOperatorType from posthog.models.filters.mixins.base import BaseParamMixin -from posthog.models.filters.mixins.utils import cached_property, include_dict, include_query_tags +from posthog.models.filters.mixins.utils import ( + cached_property, + include_dict, + include_query_tags, +) from posthog.models.property import Property, PropertyGroup @@ -82,7 +86,10 @@ def _parse_properties(self, properties: Optional[Any]) -> List[Property]: key_split = key.split("__") ret.append( Property( - key=key_split[0], value=value, operator=key_split[1] if len(key_split) > 1 else None, type="event" + key=key_split[0], + value=value, + operator=key_split[1] if len(key_split) > 1 else None, + type="event", ) ) return ret @@ -90,7 +97,8 @@ def _parse_properties(self, properties: Optional[Any]) -> List[Property]: def _parse_property_group(self, group: Optional[Dict]) -> PropertyGroup: if group and "type" in group and "values" in group: return PropertyGroup( - PropertyOperatorType(group["type"].upper()), self._parse_property_group_list(group["values"]) + PropertyOperatorType(group["type"].upper()), + self._parse_property_group_list(group["values"]), ) return 
PropertyGroup(PropertyOperatorType.AND, cast(List[Property], [])) diff --git a/posthog/models/filters/mixins/retention.py b/posthog/models/filters/mixins/retention.py index 53146bf62a7b3..c2b55f3d30e2e 100644 --- a/posthog/models/filters/mixins/retention.py +++ b/posthog/models/filters/mixins/retention.py @@ -17,7 +17,11 @@ TREND_FILTER_TYPE_EVENTS, ) from posthog.models.entity import Entity -from posthog.models.filters.mixins.common import BaseParamMixin, DateMixin, EntitiesMixin +from posthog.models.filters.mixins.common import ( + BaseParamMixin, + DateMixin, + EntitiesMixin, +) from posthog.models.filters.mixins.utils import cached_property, include_dict from posthog.utils import relative_date_parse diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py index 4afdc5dec64cf..4735a95e6a7d6 100644 --- a/posthog/models/filters/mixins/simplify.py +++ b/posthog/models/filters/mixins/simplify.py @@ -38,16 +38,23 @@ def simplify(self: T, team: "Team", **kwargs) -> T: updated_entities = {} if hasattr(result, "entities_to_dict"): for entity_type, entities in result.entities_to_dict().items(): - updated_entities[entity_type] = [self._simplify_entity(team, entity_type, entity, **kwargs) for entity in entities] # type: ignore + updated_entities[entity_type] = [ + self._simplify_entity(team, entity_type, entity, **kwargs) for entity in entities # type: ignore + ] from posthog.models.property.util import clear_excess_levels - prop_group = clear_excess_levels(self._simplify_property_group(team, result.property_groups, **kwargs), skip=True) # type: ignore + prop_group = clear_excess_levels( + self._simplify_property_group(team, result.property_groups, **kwargs), # type: ignore + skip=True, + ) prop_group = prop_group.to_dict() # type: ignore new_group_props = [] if getattr(result, "aggregation_group_type_index", None) is not None: - new_group_props.append(self._group_set_property(cast(int, 
result.aggregation_group_type_index)).to_dict()) # type: ignore + new_group_props.append( + self._group_set_property(cast(int, result.aggregation_group_type_index)).to_dict() # type: ignore + ) if new_group_props: new_group = {"type": "AND", "values": new_group_props} @@ -56,7 +63,11 @@ def simplify(self: T, team: "Team", **kwargs) -> T: return result.shallow_clone({**updated_entities, "properties": prop_group}) def _simplify_entity( - self, team: "Team", entity_type: Literal["events", "actions", "exclusions"], entity_params: Dict, **kwargs + self, + team: "Team", + entity_type: Literal["events", "actions", "exclusions"], + entity_params: Dict, + **kwargs, ) -> Dict: from posthog.models.entity import Entity, ExclusionEntity diff --git a/posthog/models/filters/mixins/test/test_interval.py b/posthog/models/filters/mixins/test/test_interval.py index efeb33e3479d7..d47adfc6b3e81 100644 --- a/posthog/models/filters/mixins/test/test_interval.py +++ b/posthog/models/filters/mixins/test/test_interval.py @@ -26,10 +26,13 @@ def test_filter_interval_success(filter, expected_interval): @pytest.mark.parametrize( "filter,expected_error_message", [ - (Filter(data={"interval": "foo"}), "Interval foo does not belong to SUPPORTED_INTERVAL_TYPES!"), + ( + Filter(data={"interval": "foo"}), + "Interval foo does not belong to SUPPORTED_INTERVAL_TYPES!", + ), (Filter(data={"interval": 123}), "Interval must be a string!"), ], ) def test_filter_interval_errors(filter, expected_error_message): with pytest.raises(ValueError, match=expected_error_message): - filter.interval + filter.interval # noqa: B018 diff --git a/posthog/models/filters/mixins/test/test_property.py b/posthog/models/filters/mixins/test/test_property.py index e1e250b5916a4..8f8b7c56721e2 100644 --- a/posthog/models/filters/mixins/test/test_property.py +++ b/posthog/models/filters/mixins/test/test_property.py @@ -13,7 +13,13 @@ def test_property_group_multi_level_parsing(): "properties": { "type": "AND", "values": [ - 
{"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]}, + { + "type": "AND", + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], + }, {"type": "OR", "values": [{"key": "attr", "value": "val_2"}]}, ], } @@ -42,7 +48,10 @@ def test_property_group_simple_parsing(): data={ "properties": { "type": "AND", - "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], } } ) @@ -64,22 +73,23 @@ def test_property_group_empty_parsing(): def test_property_group_invalid_parsing(): - filter = Filter( data={ "properties": { "type": "XaND", - "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], } } ) with pytest.raises(ValidationError): - filter.property_groups + filter.property_groups # noqa: B018 def test_property_group_includes_unhomogenous_groups(): - filter = Filter( data={ "properties": { @@ -95,7 +105,7 @@ def test_property_group_includes_unhomogenous_groups(): ) with pytest.raises(ValidationError): - filter.property_groups + filter.property_groups # noqa: B018 def test_property_multi_level_to_dict(): @@ -104,7 +114,13 @@ def test_property_multi_level_to_dict(): "properties": { "type": "AND", "values": [ - {"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]}, + { + "type": "AND", + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], + }, {"type": "OR", "values": [{"key": "attr", "value": "val_2"}]}, ], } @@ -121,7 +137,10 @@ def test_property_multi_level_to_dict(): {"key": "attr_2", "value": "val_2", "type": "event"}, ], }, - {"type": "OR", "values": [{"key": "attr", "value": "val_2", "type": "event"}]}, + { + "type": "OR", + "values": [{"key": "attr", "value": 
"val_2", "type": "event"}], + }, ], } @@ -131,7 +150,10 @@ def test_property_group_simple_to_dict(): data={ "properties": { "type": "AND", - "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], } } ) @@ -149,7 +171,13 @@ def test_property_group_simple_json_parsing(): filter = Filter( data={ "properties": json.dumps( - {"type": "AND", "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}]} + { + "type": "AND", + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], + } ) } ) @@ -173,7 +201,10 @@ def test_property_group_multi_level_json_parsing(): "values": [ { "type": "AND", - "values": [{"key": "attr", "value": "val_1"}, {"key": "attr_2", "value": "val_2"}], + "values": [ + {"key": "attr", "value": "val_1"}, + {"key": "attr_2", "value": "val_2"}, + ], }, {"type": "OR", "values": [{"key": "attr", "value": "val_2"}]}, ], diff --git a/posthog/models/filters/mixins/utils.py b/posthog/models/filters/mixins/utils.py index 2d224ca98a716..a297cdcfa6320 100644 --- a/posthog/models/filters/mixins/utils.py +++ b/posthog/models/filters/mixins/utils.py @@ -5,6 +5,7 @@ T = TypeVar("T") + # can't use cached_property directly from functools because of 3.7 compatibilty def cached_property(func: Callable[..., T]) -> T: return property(lru_cache(maxsize=1)(func)) # type: ignore diff --git a/posthog/models/filters/path_filter.py b/posthog/models/filters/path_filter.py index 9fe71d5d6d16b..4373092b91520 100644 --- a/posthog/models/filters/path_filter.py +++ b/posthog/models/filters/path_filter.py @@ -17,7 +17,11 @@ SampleMixin, SearchMixin, ) -from .mixins.funnel import FunnelCorrelationMixin, FunnelPersonsStepMixin, FunnelWindowMixin +from .mixins.funnel import ( + FunnelCorrelationMixin, + FunnelPersonsStepMixin, + FunnelWindowMixin, +) from .mixins.groups import GroupsAggregationMixin from 
.mixins.interval import IntervalMixin from .mixins.paths import ( @@ -76,7 +80,12 @@ class PathFilter( BaseFilter, SampleMixin, ): - def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: + def __init__( + self, + data: Optional[Dict[str, Any]] = None, + request: Optional[Request] = None, + **kwargs, + ) -> None: if data: data["insight"] = INSIGHT_PATHS else: diff --git a/posthog/models/filters/retention_filter.py b/posthog/models/filters/retention_filter.py index cd767606a6dd1..9cc3e8d0c7a08 100644 --- a/posthog/models/filters/retention_filter.py +++ b/posthog/models/filters/retention_filter.py @@ -18,7 +18,11 @@ from .mixins.funnel import FunnelCorrelationMixin from .mixins.groups import GroupsAggregationMixin from .mixins.property import PropertyMixin -from .mixins.retention import EntitiesDerivedMixin, RetentionDateDerivedMixin, RetentionTypeMixin +from .mixins.retention import ( + EntitiesDerivedMixin, + RetentionDateDerivedMixin, + RetentionTypeMixin, +) from .mixins.simplify import SimplifyFilterMixin from .mixins.utils import cached_property, include_dict diff --git a/posthog/models/filters/stickiness_filter.py b/posthog/models/filters/stickiness_filter.py index dbabdd5e6897a..4674c4ceeb3d9 100644 --- a/posthog/models/filters/stickiness_filter.py +++ b/posthog/models/filters/stickiness_filter.py @@ -1,6 +1,11 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union -from django.db.models.functions.datetime import TruncDay, TruncHour, TruncMonth, TruncWeek +from django.db.models.functions.datetime import ( + TruncDay, + TruncHour, + TruncMonth, + TruncWeek, +) from rest_framework.exceptions import ValidationError from rest_framework.request import Request @@ -55,7 +60,12 @@ class StickinessFilter( get_earliest_timestamp: Optional[Callable] team: "Team" - def __init__(self, data: Optional[Dict[str, Any]] = None, request: Optional[Request] = None, **kwargs) -> None: + def __init__( + 
self, + data: Optional[Dict[str, Any]] = None, + request: Optional[Request] = None, + **kwargs, + ) -> None: if data: data["insight"] = INSIGHT_STICKINESS else: diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py index 3113cc3598000..d7f60b149b93b 100644 --- a/posthog/models/filters/test/test_filter.py +++ b/posthog/models/filters/test/test_filter.py @@ -60,7 +60,12 @@ def test_to_dict(self): def test_simplify_test_accounts(self): self.team.test_account_filters = [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ] self.team.save() @@ -70,7 +75,12 @@ def test_simplify_test_accounts(self): self.assertEqual( filter.properties_to_dict(), - {"properties": {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]}}, + { + "properties": { + "type": "AND", + "values": [{"key": "attr", "value": "some_val", "type": "event"}], + } + }, ) self.assertTrue(filter.is_simplified) @@ -85,10 +95,18 @@ def test_simplify_test_accounts(self): { "type": "AND", "values": [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ], }, - {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]}, + { + "type": "AND", + "values": [{"key": "attr", "value": "some_val", "type": "event"}], + }, ], } }, @@ -104,10 +122,18 @@ def test_simplify_test_accounts(self): { "type": "AND", "values": [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ], }, - {"type": "AND", "values": [{"key": "attr", "value": "some_val", "type": "event"}]}, + { + "type": "AND", + "values": [{"key": 
"attr", "value": "some_val", "type": "event"}], + }, ], } }, @@ -117,27 +143,61 @@ def test_simplify_test_accounts(self): def property_to_Q_test_factory(filter_persons: Callable, person_factory): class TestPropertiesToQ(BaseTest): def test_simple_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com"}, + ) person_factory(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1}) - person_factory(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}}) + person_factory( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}}, + ) person_factory(team_id=self.team.pk, distinct_ids=["person4"]) - filter = Filter(data={"properties": [{"type": "person", "key": "url", "value": "https://whatever.com"}]}) + filter = Filter( + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + } + ] + } + ) results = filter_persons(filter, self.team) self.assertEqual(len(results), 1) def test_multiple_equality_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com"}, + ) person_factory(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1}) - person_factory(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}}) + person_factory( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}}, + ) person_factory(team_id=self.team.pk, distinct_ids=["person4"]) - person_factory(team_id=self.team.pk, distinct_ids=["person5"], properties={"url": "https://example.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["person5"], + 
properties={"url": "https://example.com"}, + ) filter = Filter( data={ "properties": [ - {"type": "person", "key": "url", "value": ["https://whatever.com", "https://example.com"]} + { + "type": "person", + "key": "url", + "value": ["https://whatever.com", "https://example.com"], + } ] } ) @@ -147,7 +207,15 @@ def test_multiple_equality_persons(self): def test_incomplete_data(self): filter = Filter( - data={"properties": [{"key": "$current_url", "operator": "not_icontains", "type": "event"}]} + data={ + "properties": [ + { + "key": "$current_url", + "operator": "not_icontains", + "type": "event", + } + ] + } ) self.assertListEqual(filter.property_groups.values, []) @@ -156,21 +224,60 @@ def test_numerical_person_properties(self): person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"$a_number": 5}) person_factory(team_id=self.team.pk, distinct_ids=["p3"], properties={"$a_number": 6}) - filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 4, "operator": "gt"}]}) + filter = Filter( + data={ + "properties": [ + { + "type": "person", + "key": "$a_number", + "value": 4, + "operator": "gt", + } + ] + } + ) self.assertEqual(len(filter_persons(filter, self.team)), 2) filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 5}]}) self.assertEqual(len(filter_persons(filter, self.team)), 1) - filter = Filter(data={"properties": [{"type": "person", "key": "$a_number", "value": 6, "operator": "lt"}]}) + filter = Filter( + data={ + "properties": [ + { + "type": "person", + "key": "$a_number", + "value": 6, + "operator": "lt", + } + ] + } + ) self.assertEqual(len(filter_persons(filter, self.team)), 2) def test_contains_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"}) - person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + 
properties={"url": "https://whatever.com"}, + ) + person_factory( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"url": "https://example.com"}, + ) filter = Filter( - data={"properties": [{"type": "person", "key": "url", "value": "whatever", "operator": "icontains"}]} + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": "whatever", + "operator": "icontains", + } + ] + } ) results = filter_persons(filter, self.team) @@ -179,49 +286,106 @@ def test_contains_persons(self): def test_regex_persons(self): p1_uuid = str( person_factory( - team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"url": "https://whatever.com"}, ).uuid ) p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid) filter = Filter( - data={"properties": [{"type": "person", "key": "url", "value": r"\.com$", "operator": "regex"}]} + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": r"\.com$", + "operator": "regex", + } + ] + } ) results = filter_persons(filter, self.team) self.assertCountEqual(results, [p1_uuid]) filter = Filter( - data={"properties": [{"type": "person", "key": "url", "value": r"\.eee$", "operator": "not_regex"}]} + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": r"\.eee$", + "operator": "not_regex", + } + ] + } ) results = filter_persons(filter, self.team) self.assertCountEqual(results, [p1_uuid, p2_uuid]) def test_invalid_regex_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"}) - person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"url": "https://whatever.com"}, + ) + person_factory( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"url": "https://example.com"}, + ) filter = 
Filter( - data={"properties": [{"type": "person", "key": "url", "value": r"?*", "operator": "regex"}]} + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": r"?*", + "operator": "regex", + } + ] + } ) self.assertEqual(len(filter_persons(filter, self.team)), 0) filter = Filter( - data={"properties": [{"type": "person", "key": "url", "value": r"?*", "operator": "not_regex"}]} + data={ + "properties": [ + { + "type": "person", + "key": "url", + "value": r"?*", + "operator": "not_regex", + } + ] + } ) self.assertEqual(len(filter_persons(filter, self.team)), 0) def test_is_not_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"url": "https://whatever.com"}, + ) p2_uuid = str( person_factory( - team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"url": "https://example.com"}, ).uuid ) filter = Filter( data={ "properties": [ - {"type": "person", "key": "url", "value": "https://whatever.com", "operator": "is_not"} + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + "operator": "is_not", + } ] } ) @@ -229,10 +393,16 @@ def test_is_not_persons(self): self.assertCountEqual(results, [p2_uuid]) def test_does_not_contain_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"url": "https://whatever.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"url": "https://whatever.com"}, + ) p2_uuid = str( person_factory( - team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://example.com"} + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"url": "https://example.com"}, ).uuid ) p3_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p3"]).uuid) @@ -241,7 +411,12 @@ def 
test_does_not_contain_persons(self): filter = Filter( data={ "properties": [ - {"type": "person", "key": "url", "value": "whatever.com", "operator": "not_icontains"} + { + "type": "person", + "key": "url", + "value": "whatever.com", + "operator": "not_icontains", + } ] } ) @@ -256,12 +431,21 @@ def test_multiple_persons(self): properties={"url": "https://whatever.com", "another_key": "value"}, ).uuid ) - person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"url": "https://whatever.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"url": "https://whatever.com"}, + ) filter = Filter( data={ "properties": [ - {"type": "person", "key": "url", "value": "whatever.com", "operator": "icontains"}, + { + "type": "person", + "key": "url", + "value": "whatever.com", + "operator": "icontains", + }, {"type": "person", "key": "another_key", "value": "value"}, ] } @@ -271,7 +455,11 @@ def test_multiple_persons(self): def test_boolean_filters_persons(self): p1_uuid = str( - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True}).uuid + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"is_first_user": True}, + ).uuid ) person_factory(team_id=self.team.pk, distinct_ids=["p2"]) @@ -281,29 +469,62 @@ def test_boolean_filters_persons(self): def test_is_not_set_and_is_set_persons(self): p1_uuid = str( - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True}).uuid + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"is_first_user": True}, + ).uuid ) p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid) filter = Filter( - data={"properties": [{"type": "person", "key": "is_first_user", "value": "", "operator": "is_set"}]} + data={ + "properties": [ + { + "type": "person", + "key": "is_first_user", + "value": "", + "operator": "is_set", + } + ] + } ) results = filter_persons(filter, 
self.team) self.assertEqual(results, [p1_uuid]) filter = Filter( - data={"properties": [{"type": "person", "key": "is_first_user", "value": "", "operator": "is_not_set"}]} + data={ + "properties": [ + { + "type": "person", + "key": "is_first_user", + "value": "", + "operator": "is_not_set", + } + ] + } ) results = filter_persons(filter, self.team) self.assertEqual(results, [p2_uuid]) def test_is_not_true_false_persons(self): - person_factory(team_id=self.team.pk, distinct_ids=["p1"], properties={"is_first_user": True}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"is_first_user": True}, + ) p2_uuid = str(person_factory(team_id=self.team.pk, distinct_ids=["p2"]).uuid) filter = Filter( data={ - "properties": [{"type": "person", "key": "is_first_user", "value": ["true"], "operator": "is_not"}] + "properties": [ + { + "type": "person", + "key": "is_first_user", + "value": ["true"], + "operator": "is_not", + } + ] } ) results = filter_persons(filter, self.team) @@ -312,15 +533,26 @@ def test_is_not_true_false_persons(self): def test_is_date_before_persons(self): p1_uuid = str( person_factory( - team_id=self.team.pk, distinct_ids=["p1"], properties={"some-timestamp": "2022-03-01"} + team_id=self.team.pk, + distinct_ids=["p1"], + properties={"some-timestamp": "2022-03-01"}, ).uuid ) - person_factory(team_id=self.team.pk, distinct_ids=["p2"], properties={"some-timestamp": "2022-05-01"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["p2"], + properties={"some-timestamp": "2022-05-01"}, + ) filter = Filter( data={ "properties": [ - {"type": "person", "key": "some-timestamp", "value": "2022-04-01", "operator": "is_date_before"} + { + "type": "person", + "key": "some-timestamp", + "value": "2022-04-01", + "operator": "is_date_before", + } ] } ) @@ -348,14 +580,25 @@ def test_json_object(self): self.assertEqual(results, [str(p1_uuid.uuid)]) def test_filter_out_team_members_persons(self): - person_factory(team_id=self.team.pk, 
distinct_ids=["team_member"], properties={"email": "test@posthog.com"}) + person_factory( + team_id=self.team.pk, + distinct_ids=["team_member"], + properties={"email": "test@posthog.com"}, + ) p2_uuid = str( person_factory( - team_id=self.team.pk, distinct_ids=["random_user"], properties={"email": "test@gmail.com"} + team_id=self.team.pk, + distinct_ids=["random_user"], + properties={"email": "test@gmail.com"}, ).uuid ) self.team.test_account_filters = [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"} + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + } ] self.team.save() filter = Filter(data={FILTER_TEST_ACCOUNTS: True}, team=self.team) @@ -373,7 +616,9 @@ def _filter_persons(filter: Filter, team: Team): return [str(uuid) for uuid in persons.values_list("uuid", flat=True)] -class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest): # type: ignore +class TestDjangoPropertiesToQ( + property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest +): # type: ignore @snapshot_postgres_queries def test_array_property_as_string_on_persons(self): Person.objects.create( @@ -388,7 +633,16 @@ def test_array_property_as_string_on_persons(self): # some idiosyncracies on how this works, but we shouldn't error out on this filter = Filter( - data={"properties": [{"type": "person", "key": "urls", "operator": "icontains", "value": '["abcd"]'}]} + data={ + "properties": [ + { + "type": "person", + "key": "urls", + "operator": "icontains", + "value": '["abcd"]', + } + ] + } ) persons = Person.objects.filter(property_group_to_Q(filter.property_groups)) @@ -401,7 +655,9 @@ def test_array_property_as_string_on_persons(self): def test_person_cohort_properties(self): person1_distinct_id = "person1" person1 = Person.objects.create( - team=self.team, distinct_ids=[person1_distinct_id], properties={"$some_prop": 1} + team=self.team, 
+ distinct_ids=[person1_distinct_id], + properties={"$some_prop": 1}, ) cohort1 = Cohort.objects.create(team=self.team, groups=[{"properties": {"$some_prop": 1}}], name="cohort1") cohort1.people.add(person1) @@ -410,7 +666,10 @@ def test_person_cohort_properties(self): with self.assertNumQueries(2): matched_person = ( - Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id) + Person.objects.filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) .filter(properties_to_Q(filter.property_groups.flat)) .exists() ) @@ -419,7 +678,9 @@ def test_person_cohort_properties(self): def test_person_cohort_properties_with_zero_value(self): person1_distinct_id = "person1" person1 = Person.objects.create( - team=self.team, distinct_ids=[person1_distinct_id], properties={"$some_prop": 0} + team=self.team, + distinct_ids=[person1_distinct_id], + properties={"$some_prop": 0}, ) cohort1 = Cohort.objects.create(team=self.team, groups=[{"properties": {"$some_prop": 0}}], name="cohort1") cohort1.people.add(person1) @@ -428,7 +689,10 @@ def test_person_cohort_properties_with_zero_value(self): with self.assertNumQueries(2): matched_person = ( - Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id) + Person.objects.filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) .filter(properties_to_Q(filter.property_groups.flat)) .exists() ) @@ -436,7 +700,11 @@ def test_person_cohort_properties_with_zero_value(self): def test_person_cohort_properties_with_negation(self): person1_distinct_id = "example_id" - Person.objects.create(team=self.team, distinct_ids=["example_id"], properties={"$some_prop": "matches"}) + Person.objects.create( + team=self.team, + distinct_ids=["example_id"], + properties={"$some_prop": "matches"}, + ) user_in = Cohort.objects.create( team=self.team, @@ -447,7 +715,11 @@ def test_person_cohort_properties_with_negation(self): { 
"type": "AND", "values": [ - {"key": "$some_prop", "value": "matches", "type": "person"}, + { + "key": "$some_prop", + "value": "matches", + "type": "person", + }, ], } ], @@ -464,7 +736,11 @@ def test_person_cohort_properties_with_negation(self): { "type": "OR", "values": [ - {"key": "$bad_prop", "value": "nomatchihope", "type": "person"}, + { + "key": "$bad_prop", + "value": "nomatchihope", + "type": "person", + }, ], }, ], @@ -501,14 +777,28 @@ def test_person_cohort_properties_with_negation(self): with self.assertNumQueries(4): matched_person = ( - Person.objects.filter(team_id=self.team.pk, persondistinctid__distinct_id=person1_distinct_id) + Person.objects.filter( + team_id=self.team.pk, + persondistinctid__distinct_id=person1_distinct_id, + ) .filter(properties_to_Q(filter.property_groups.flat)) .exists() ) self.assertTrue(matched_person) def test_group_property_filters_direct(self): - filter = Filter(data={"properties": [{"key": "some_prop", "value": 5, "type": "group", "group_type_index": 1}]}) + filter = Filter( + data={ + "properties": [ + { + "key": "some_prop", + "value": 5, + "type": "group", + "group_type_index": 1, + } + ] + } + ) query_filter = properties_to_Q(filter.property_groups.flat) self.assertEqual( query_filter, @@ -543,9 +833,17 @@ def filter_persons_with_property_group( class TestDjangoPropertyGroupToQ(BaseTest, QueryMatchingTest): def test_simple_property_group_to_q(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com"}, + ) _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}}, + ) _create_person(team_id=self.team.pk, 
distinct_ids=["person4"]) filter = Filter( @@ -553,7 +851,11 @@ def test_simple_property_group_to_q(self): "properties": { "type": "OR", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "url", "value": 1}, ], } @@ -566,10 +868,20 @@ def test_simple_property_group_to_q(self): def test_multiple_properties_property_group_to_q(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) _create_person(team_id=self.team.pk, distinct_ids=["person4"]) filter = Filter( @@ -577,7 +889,11 @@ def test_multiple_properties_property_group_to_q(self): "properties": { "type": "OR", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, ], } @@ -590,10 +906,20 @@ def test_multiple_properties_property_group_to_q(self): def test_nested_property_group_to_q(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) 
+ _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) _create_person(team_id=self.team.pk, distinct_ids=["person4"]) filter = Filter( @@ -604,11 +930,18 @@ def test_nested_property_group_to_q(self): { "type": "AND", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, ], }, - {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]}, + { + "type": "AND", + "values": [{"type": "person", "key": "bla", "value": 3}], + }, ], } } @@ -620,10 +953,20 @@ def test_nested_property_group_to_q(self): def test_property_group_to_q_with_property_overrides(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) _create_person(team_id=self.team.pk, distinct_ids=["person4"]) filter = Filter( @@ -634,11 +977,18 @@ def test_property_group_to_q_with_property_overrides(self): { "type": "AND", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": 
"https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, ], }, - {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]}, + { + "type": "AND", + "values": [{"type": "person", "key": "bla", "value": 3}], + }, ], } } @@ -651,10 +1001,20 @@ def test_property_group_to_q_with_property_overrides(self): @snapshot_postgres_queries def test_property_group_to_q_with_cohorts(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) _create_person(team_id=self.team.pk, distinct_ids=["person4"]) cohort1 = Cohort.objects.create( @@ -679,12 +1039,19 @@ def test_property_group_to_q_with_cohorts(self): { "type": "AND", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, {"type": "cohort", "key": "id", "value": cohort1.pk}, ], }, - {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]}, + { + "type": "AND", + "values": [{"type": "person", "key": "bla", "value": 3}], + }, ], } } @@ -696,12 +1063,36 @@ def test_property_group_to_q_with_cohorts(self): @snapshot_postgres_queries def test_property_group_to_q_with_negation_cohorts(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"bla": 1, "other": 1}) - _create_person(team_id=self.team.pk, 
distinct_ids=["person2"], properties={"bla": 2, "other": 1}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"bla": 3, "other": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person4"], properties={"bla": 4, "other": 1}) - _create_person(team_id=self.team.pk, distinct_ids=["person5"], properties={"bla": 5, "other": 1}) - _create_person(team_id=self.team.pk, distinct_ids=["person6"], properties={"bla": 6, "other": 1}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"bla": 1, "other": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"bla": 2, "other": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"bla": 3, "other": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person4"], + properties={"bla": 4, "other": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person5"], + properties={"bla": 5, "other": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person6"], + properties={"bla": 6, "other": 1}, + ) cohort1 = Cohort.objects.create( team=self.team, @@ -750,8 +1141,18 @@ def test_property_group_to_q_with_negation_cohorts(self): "properties": { "type": "AND", "values": [ - {"type": "cohort", "key": "id", "value": cohort1.pk, "negation": True}, - {"type": "cohort", "key": "id", "value": cohort2.pk, "negation": True}, + { + "type": "cohort", + "key": "id", + "value": cohort1.pk, + "negation": True, + }, + { + "type": "cohort", + "key": "id", + "value": cohort2.pk, + "negation": True, + }, {"type": "cohort", "key": "id", "value": cohort3.pk}, ], } @@ -777,10 +1178,20 @@ def test_property_group_to_q_with_negation_cohorts(self): @snapshot_postgres_queries def test_property_group_to_q_with_cohorts_no_match(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + 
distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) _create_person(team_id=self.team.pk, distinct_ids=["person4"]) cohort1 = Cohort.objects.create( @@ -805,12 +1216,19 @@ def test_property_group_to_q_with_cohorts_no_match(self): { "type": "AND", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, {"type": "cohort", "key": "id", "value": cohort1.pk}, ], }, - {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]}, + { + "type": "AND", + "values": [{"type": "person", "key": "bla", "value": 3}], + }, ], } } @@ -822,13 +1240,27 @@ def test_property_group_to_q_with_cohorts_no_match(self): def test_property_group_to_q_with_behavioural_cohort(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1"], properties={"url": "https://whatever.com", "bla": 1} + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"url": "https://whatever.com", "bla": 1}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"url": 1, "bla": 2}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"url": {"bla": "bla"}, "bla": 3}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"url": 1, "bla": 2}) - _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"url": {"bla": "bla"}, "bla": 3}) 
_create_person(team_id=self.team.pk, distinct_ids=["person4"]) - cohort2 = Cohort.objects.create(team=self.team, groups=[{"event_id": "$pageview", "days": 7}], name="cohort2") + cohort2 = Cohort.objects.create( + team=self.team, + groups=[{"event_id": "$pageview", "days": 7}], + name="cohort2", + ) filter = Filter( data={ @@ -838,12 +1270,19 @@ def test_property_group_to_q_with_behavioural_cohort(self): { "type": "AND", "values": [ - {"type": "person", "key": "url", "value": "https://whatever.com"}, + { + "type": "person", + "key": "url", + "value": "https://whatever.com", + }, {"type": "person", "key": "bla", "value": 1}, {"type": "cohort", "key": "id", "value": cohort2.pk}, ], }, - {"type": "AND", "values": [{"type": "person", "key": "bla", "value": 3}]}, + { + "type": "AND", + "values": [{"type": "person", "key": "bla", "value": 3}], + }, ], } } diff --git a/posthog/models/filters/test/test_lifecycle_filter.py b/posthog/models/filters/test/test_lifecycle_filter.py index 8bf5d904c66f4..9273a12e654d1 100644 --- a/posthog/models/filters/test/test_lifecycle_filter.py +++ b/posthog/models/filters/test/test_lifecycle_filter.py @@ -64,4 +64,7 @@ def test_filter_properties(self): }, ) self.assertEqual(filter.lifecycle_type, lifecycle_type) - self.assertEqual(filter.target_date, relative_date_parse(target_date, self.team.timezone_info)) + self.assertEqual( + filter.target_date, + relative_date_parse(target_date, self.team.timezone_info), + ) diff --git a/posthog/models/filters/utils.py b/posthog/models/filters/utils.py index 7c2f75331bc74..0b31b209afa69 100644 --- a/posthog/models/filters/utils.py +++ b/posthog/models/filters/utils.py @@ -33,19 +33,31 @@ def get_filter(team, data: dict = {}, request: Optional[Request] = None): if insight == INSIGHT_RETENTION: return RetentionFilter(data={**data, "insight": INSIGHT_RETENTION}, request=request, team=team) elif insight == INSIGHT_STICKINESS or (insight == INSIGHT_TRENDS and data.get("shown_as") == "Stickiness"): - return 
StickinessFilter(data=data, request=request, team=team, get_earliest_timestamp=earliest_timestamp_func) + return StickinessFilter( + data=data, + request=request, + team=team, + get_earliest_timestamp=earliest_timestamp_func, + ) elif insight == INSIGHT_PATHS: return PathFilter(data={**data, "insight": INSIGHT_PATHS}, request=request, team=team) elif insight == INSIGHT_FUNNELS: return Filter( - data={**data, **(request.data if request else {}), "insight": INSIGHT_FUNNELS}, request=request, team=team + data={ + **data, + **(request.data if request else {}), + "insight": INSIGHT_FUNNELS, + }, + request=request, + team=team, ) return Filter(data=data, request=request, team=team) def validate_group_type_index(param_name: str, value: Any, required=False) -> Optional[GroupTypeIndex]: error = ValidationError( - f"{param_name} is required to be at least 0 and less than {GROUP_TYPES_LIMIT}", code="invalid" + f"{param_name} is required to be at least 0 and less than {GROUP_TYPES_LIMIT}", + code="invalid", ) if required and value is None: diff --git a/posthog/models/group/sql.py b/posthog/models/group/sql.py index 2eb5222859729..41b9e72cebcbb 100644 --- a/posthog/models/group/sql.py +++ b/posthog/models/group/sql.py @@ -36,7 +36,10 @@ ) KAFKA_GROUPS_TABLE_SQL = lambda: GROUPS_TABLE_BASE_SQL.format( - table_name="kafka_" + GROUPS_TABLE, cluster=CLICKHOUSE_CLUSTER, engine=kafka_engine(KAFKA_GROUPS), extra_fields="" + table_name="kafka_" + GROUPS_TABLE, + cluster=CLICKHOUSE_CLUSTER, + engine=kafka_engine(KAFKA_GROUPS), + extra_fields="", ) # You must include the database here because of a bug in clickhouse diff --git a/posthog/models/group/util.py b/posthog/models/group/util.py index fa3520dc9912c..427c883a2e920 100644 --- a/posthog/models/group/util.py +++ b/posthog/models/group/util.py @@ -60,7 +60,15 @@ def create_group( else: timestamp = timestamp.astimezone(ZoneInfo("UTC")) - raw_create_group_ch(team_id, group_type_index, group_key, properties, timestamp, 
timestamp=timestamp, sync=sync) + raw_create_group_ch( + team_id, + group_type_index, + group_key, + properties, + timestamp, + timestamp=timestamp, + sync=sync, + ) group = Group.objects.create( team_id=team_id, group_type_index=group_type_index, @@ -73,7 +81,9 @@ def create_group( def get_aggregation_target_field( - aggregation_group_type_index: Optional[GroupTypeIndex], event_table_alias: str, default: str + aggregation_group_type_index: Optional[GroupTypeIndex], + event_table_alias: str, + default: str, ) -> str: if aggregation_group_type_index is not None: return f'{event_table_alias}."$group_{aggregation_group_type_index}"' diff --git a/posthog/models/group_type_mapping.py b/posthog/models/group_type_mapping.py index 80ebdebfdeaf6..ed4a19164f4fb 100644 --- a/posthog/models/group_type_mapping.py +++ b/posthog/models/group_type_mapping.py @@ -7,9 +7,13 @@ class GroupTypeMapping(models.Model): class Meta: constraints = [ models.UniqueConstraint(fields=["team", "group_type"], name="unique group types for team"), - models.UniqueConstraint(fields=["team", "group_type_index"], name="unique event column indexes for team"), + models.UniqueConstraint( + fields=["team", "group_type_index"], + name="unique event column indexes for team", + ), models.CheckConstraint( - check=models.Q(group_type_index__lte=5), name="group_type_index is less than or equal 5" + check=models.Q(group_type_index__lte=5), + name="group_type_index is less than or equal 5", ), ] diff --git a/posthog/models/ingestion_warnings/sql.py b/posthog/models/ingestion_warnings/sql.py index 55a631a0835c0..6f3023744f51f 100644 --- a/posthog/models/ingestion_warnings/sql.py +++ b/posthog/models/ingestion_warnings/sql.py @@ -1,7 +1,11 @@ from django.conf import settings from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, kafka_engine -from posthog.clickhouse.table_engines import Distributed, MergeTreeEngine, ReplicationScheme +from posthog.clickhouse.table_engines import ( + Distributed, + 
MergeTreeEngine, + ReplicationScheme, +) from posthog.kafka_client.topics import KAFKA_INGESTION_WARNINGS INGESTION_WARNINGS_TABLE_BASE_SQL = """ diff --git a/posthog/models/insight.py b/posthog/models/insight.py index 1c5d168ed7b5b..a3057cdb11c7d 100644 --- a/posthog/models/insight.py +++ b/posthog/models/insight.py @@ -52,12 +52,20 @@ class Insight(models.Model): refresh_attempt: models.IntegerField = models.IntegerField(null=True, blank=True) last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) last_modified_by: models.ForeignKey = models.ForeignKey( - "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_insights" + "User", + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="modified_insights", ) # DEPRECATED: using the new "dashboards" relation instead dashboard: models.ForeignKey = models.ForeignKey( - "Dashboard", related_name="items", on_delete=models.CASCADE, null=True, blank=True + "Dashboard", + related_name="items", + on_delete=models.CASCADE, + null=True, + blank=True, ) # DEPRECATED: on dashboard_insight now layouts: models.JSONField = models.JSONField(default=dict) @@ -75,7 +83,11 @@ class Insight(models.Model): deprecated_tags: ArrayField = ArrayField(models.CharField(max_length=32), null=True, blank=True, default=list) # DEPRECATED: now using app-wide tagging model. 
See EnterpriseTaggedItem deprecated_tags_v2: ArrayField = ArrayField( - models.CharField(max_length=32), null=True, blank=True, default=None, db_column="tags" + models.CharField(max_length=32), + null=True, + blank=True, + default=None, + db_column="tags", ) # Changing these fields materially alters the Insight, so these count for the "last_modified_*" fields @@ -141,7 +153,10 @@ def dashboard_filters(self, dashboard: Optional[Dashboard] = None): elif self.filters.get("properties", {}).get("type"): filters["properties"] = { "type": "AND", - "values": [self.filters["properties"], {"type": "AND", "values": dashboard_properties}], + "values": [ + self.filters["properties"], + {"type": "AND", "values": dashboard_properties}, + ], } elif not self.filters.get("properties"): filters["properties"] = dashboard_properties @@ -157,7 +172,9 @@ def dashboard_filters(self, dashboard: Optional[Dashboard] = None): def dashboard_query(self, dashboard: Optional[Dashboard]) -> Optional[dict]: if not dashboard or not self.query: return self.query - from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters + from posthog.hogql_queries.apply_dashboard_filters import ( + apply_dashboard_filters, + ) return apply_dashboard_filters(self.query, dashboard.filters, self.team) @@ -184,7 +201,9 @@ def generate_insight_cache_key(insight: Insight, dashboard: Optional[Dashboard]) dashboard_filters = dashboard.filters if dashboard else None if dashboard_filters: - from posthog.hogql_queries.apply_dashboard_filters import apply_dashboard_filters + from posthog.hogql_queries.apply_dashboard_filters import ( + apply_dashboard_filters, + ) q = apply_dashboard_filters(insight.query, dashboard_filters, insight.team) else: diff --git a/posthog/models/insight_caching_state.py b/posthog/models/insight_caching_state.py index 2c1382b637efd..9e6abc0b7b5a8 100644 --- a/posthog/models/insight_caching_state.py +++ b/posthog/models/insight_caching_state.py @@ -15,15 +15,24 @@ class Meta: 
indexes = [models.Index(fields=["cache_key"], name="filter_by_cache_key_idx")] constraints = [ UniqueConstraintByExpression( - name="unique_insight_tile_idx", expression="(insight_id, coalesce(dashboard_tile_id, -1))" + name="unique_insight_tile_idx", + expression="(insight_id, coalesce(dashboard_tile_id, -1))", ) ] team: models.ForeignKey = models.ForeignKey(Team, on_delete=models.CASCADE) - insight = models.ForeignKey("posthog.Insight", on_delete=models.CASCADE, related_name="caching_states", null=False) + insight = models.ForeignKey( + "posthog.Insight", + on_delete=models.CASCADE, + related_name="caching_states", + null=False, + ) dashboard_tile = models.ForeignKey( - "posthog.DashboardTile", on_delete=models.CASCADE, related_name="caching_states", null=True + "posthog.DashboardTile", + on_delete=models.CASCADE, + related_name="caching_states", + null=True, ) cache_key: models.CharField = models.CharField(max_length=400, null=False, blank=False) @@ -67,7 +76,11 @@ def sync_dashboard_updated(sender, instance: Dashboard, **kwargs): from posthog.celery import sync_insight_caching_state update_fields = kwargs.get("update_fields") - if update_fields in [frozenset({"filters_hash"}), frozenset({"last_refresh"}), frozenset({"last_accessed_at"})]: + if update_fields in [ + frozenset({"filters_hash"}), + frozenset({"last_refresh"}), + frozenset({"last_accessed_at"}), + ]: return for tile_id in DashboardTile.objects.filter(dashboard=instance).values_list("pk", flat=True): diff --git a/posthog/models/integration.py b/posthog/models/integration.py index 55ed06232445a..8ce1c9d6ef7c7 100644 --- a/posthog/models/integration.py +++ b/posthog/models/integration.py @@ -106,7 +106,11 @@ def integration_from_slack_response(cls, team_id: str, created_by: User, params: integration, created = Integration.objects.update_or_create( team_id=team_id, kind="slack", - defaults={"config": config, "sensitive_config": sensitive_config, "created_by": created_by}, + defaults={ + "config": 
config, + "sensitive_config": sensitive_config, + "created_by": created_by, + }, ) return integration @@ -147,6 +151,12 @@ def validate_request(cls, request: Request): @classmethod @cache_for(timedelta(minutes=5)) def slack_config(cls): - config = get_instance_settings(["SLACK_APP_CLIENT_ID", "SLACK_APP_CLIENT_SECRET", "SLACK_APP_SIGNING_SECRET"]) + config = get_instance_settings( + [ + "SLACK_APP_CLIENT_ID", + "SLACK_APP_CLIENT_SECRET", + "SLACK_APP_SIGNING_SECRET", + ] + ) return config diff --git a/posthog/models/messaging.py b/posthog/models/messaging.py index c1a787e30d309..5514f98baccb2 100644 --- a/posthog/models/messaging.py +++ b/posthog/models/messaging.py @@ -31,4 +31,7 @@ class MessagingRecord(UUIDModel): created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) class Meta: - unique_together = ("email_hash", "campaign_key") # can only send campaign once to each email + unique_together = ( + "email_hash", + "campaign_key", + ) # can only send campaign once to each email diff --git a/posthog/models/notebook/notebook.py b/posthog/models/notebook/notebook.py index 490645909df26..ec61ab1c22ed0 100644 --- a/posthog/models/notebook/notebook.py +++ b/posthog/models/notebook/notebook.py @@ -19,7 +19,11 @@ class Notebook(UUIDModel): created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) last_modified_by: models.ForeignKey = models.ForeignKey( - "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_notebooks" + "User", + on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="modified_notebooks", ) class Meta: diff --git a/posthog/models/organization.py b/posthog/models/organization.py index cc4c07568312e..700fea47658f1 100644 --- a/posthog/models/organization.py +++ b/posthog/models/organization.py @@ -24,7 +24,12 @@ from posthog.cloud_utils import is_cloud from 
posthog.constants import MAX_SLUG_LENGTH, AvailableFeature from posthog.email import is_email_available -from posthog.models.utils import LowercaseSlugField, UUIDModel, create_with_slug, sane_repr +from posthog.models.utils import ( + LowercaseSlugField, + UUIDModel, + create_with_slug, + sane_repr, +) from posthog.redis import get_client from posthog.utils import absolute_uri @@ -56,7 +61,11 @@ def create(self, *args: Any, **kwargs: Any): return create_with_slug(super().create, *args, **kwargs) def bootstrap( - self, user: Optional["User"], *, team_fields: Optional[Dict[str, Any]] = None, **kwargs + self, + user: Optional["User"], + *, + team_fields: Optional[Dict[str, Any]] = None, + **kwargs, ) -> Tuple["Organization", Optional["OrganizationMembership"], "Team"]: """Instead of doing the legwork of creating an organization yourself, delegate the details with bootstrap.""" from .team import Team # Avoiding circular import @@ -67,7 +76,9 @@ def bootstrap( organization_membership: Optional[OrganizationMembership] = None if user is not None: organization_membership = OrganizationMembership.objects.create( - organization=organization, user=user, level=OrganizationMembership.Level.OWNER + organization=organization, + user=user, + level=OrganizationMembership.Level.OWNER, ) user.current_organization = organization user.organization = user.current_organization # Update cached property @@ -111,7 +122,7 @@ class PluginsAccessLevel(models.IntegerChoices): slug: LowercaseSlugField = LowercaseSlugField(unique=True, max_length=MAX_SLUG_LENGTH) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) - plugins_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( + plugins_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField( default=PluginsAccessLevel.CONFIG, choices=PluginsAccessLevel.choices, ) @@ -222,8 +233,14 @@ def 
organization_about_to_be_created(sender, instance: Organization, raw, using, def ensure_available_features_sync(sender, instance: Organization, **kwargs): updated_fields = kwargs.get("update_fields") or [] if "available_features" in updated_fields: - logger.info("Notifying plugin-server to reset available features cache.", {"organization_id": instance.id}) - get_client().publish("reset-available-features-cache", json.dumps({"organization_id": str(instance.id)})) + logger.info( + "Notifying plugin-server to reset available features cache.", + {"organization_id": instance.id}, + ) + get_client().publish( + "reset-available-features-cache", + json.dumps({"organization_id": str(instance.id)}), + ) class OrganizationMembership(UUIDModel): @@ -235,7 +252,10 @@ class Level(models.IntegerChoices): OWNER = 15, "owner" organization: models.ForeignKey = models.ForeignKey( - "posthog.Organization", on_delete=models.CASCADE, related_name="memberships", related_query_name="membership" + "posthog.Organization", + on_delete=models.CASCADE, + related_name="memberships", + related_query_name="membership", ) user: models.ForeignKey = models.ForeignKey( "posthog.User", @@ -251,9 +271,14 @@ class Level(models.IntegerChoices): class Meta: constraints = [ - models.UniqueConstraint(fields=["organization_id", "user_id"], name="unique_organization_membership"), models.UniqueConstraint( - fields=["organization_id"], condition=models.Q(level=15), name="only_one_owner_per_organization" + fields=["organization_id", "user_id"], + name="unique_organization_membership", + ), + models.UniqueConstraint( + fields=["organization_id"], + condition=models.Q(level=15), + name="only_one_owner_per_organization", ), ] @@ -261,7 +286,9 @@ def __str__(self): return str(self.Level(self.level)) def validate_update( - self, membership_being_updated: "OrganizationMembership", new_level: Optional[Level] = None + self, + membership_being_updated: "OrganizationMembership", + new_level: Optional[Level] = None, ) -> 
None: if new_level is not None: if membership_being_updated.id == self.id: @@ -290,7 +317,10 @@ def validate_update( class OrganizationInvite(UUIDModel): organization: models.ForeignKey = models.ForeignKey( - "posthog.Organization", on_delete=models.CASCADE, related_name="invites", related_query_name="invite" + "posthog.Organization", + on_delete=models.CASCADE, + related_name="invites", + related_query_name="invite", ) target_email: models.EmailField = models.EmailField(null=True, db_index=True) first_name: models.CharField = models.CharField(max_length=30, blank=True, default="") @@ -326,7 +356,8 @@ def validate( if self.is_expired(): raise exceptions.ValidationError( - "This invite has expired. Please ask your admin for a new one.", code="expired" + "This invite has expired. Please ask your admin for a new one.", + code="expired", ) if user is None and User.objects.filter(email=invite_email).exists(): @@ -334,7 +365,8 @@ def validate( if OrganizationMembership.objects.filter(organization=self.organization, user=user).exists(): raise exceptions.ValidationError( - "You already are a member of this organization.", code="user_already_member" + "You already are a member of this organization.", + code="user_already_member", ) if OrganizationMembership.objects.filter( @@ -352,7 +384,12 @@ def use(self, user: "User", *, prevalidated: bool = False) -> None: if is_email_available(with_absolute_urls=True) and self.organization.is_member_join_email_enabled: from posthog.tasks.email import send_member_join - send_member_join.apply_async(kwargs={"invitee_uuid": user.uuid, "organization_id": self.organization_id}) + send_member_join.apply_async( + kwargs={ + "invitee_uuid": user.uuid, + "organization_id": self.organization_id, + } + ) OrganizationInvite.objects.filter(target_email__iexact=self.target_email).delete() def is_expired(self) -> bool: diff --git a/posthog/models/performance/sql.py b/posthog/models/performance/sql.py index 14c5b1763cc08..31914e858b9b9 100644 --- 
a/posthog/models/performance/sql.py +++ b/posthog/models/performance/sql.py @@ -1,7 +1,16 @@ """https://developer.mozilla.org/en-US/docs/Web/API/PerformanceEntry""" from posthog import settings -from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS_WITH_PARTITION, STORAGE_POLICY, kafka_engine, ttl_period -from posthog.clickhouse.table_engines import Distributed, MergeTreeEngine, ReplicationScheme +from posthog.clickhouse.kafka_engine import ( + KAFKA_COLUMNS_WITH_PARTITION, + STORAGE_POLICY, + kafka_engine, + ttl_period, +) +from posthog.clickhouse.table_engines import ( + Distributed, + MergeTreeEngine, + ReplicationScheme, +) from posthog.kafka_client.topics import KAFKA_PERFORMANCE_EVENTS """ diff --git a/posthog/models/person/person.py b/posthog/models/person/person.py index b2b3bb3e36725..cae5e450fa766 100644 --- a/posthog/models/person/person.py +++ b/posthog/models/person/person.py @@ -66,7 +66,10 @@ def split_person(self, main_distinct_id: Optional[str], max_splits: Optional[int pdi.version = (pdi.version or 0) + 1 pdi.save(update_fields=["version", "person_id"]) - from posthog.models.person.util import create_person, create_person_distinct_id + from posthog.models.person.util import ( + create_person, + create_person_distinct_id, + ) create_person_distinct_id( team_id=self.team_id, @@ -75,7 +78,11 @@ def split_person(self, main_distinct_id: Optional[str], max_splits: Optional[int is_deleted=False, version=pdi.version, ) - create_person(team_id=self.team_id, uuid=str(person.uuid), version=person.version or 0) + create_person( + team_id=self.team_id, + uuid=str(person.uuid), + version=person.version or 0, + ) objects = PersonManager() created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) @@ -138,7 +145,10 @@ class PersonOverride(models.Model): class Meta: constraints = [ - models.UniqueConstraint(fields=["team", "old_person_id"], name="unique override per old_person_id"), + models.UniqueConstraint( + fields=["team", 
"old_person_id"], + name="unique override per old_person_id", + ), models.CheckConstraint( check=~Q(old_person_id__exact=F("override_person_id")), name="old_person_id_different_from_override_person_id", diff --git a/posthog/models/person/sql.py b/posthog/models/person/sql.py index 61088e6c03761..ffb80869b9e9a 100644 --- a/posthog/models/person/sql.py +++ b/posthog/models/person/sql.py @@ -2,7 +2,11 @@ from posthog.clickhouse.indexes import index_by_kafka_timestamp from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS, STORAGE_POLICY, kafka_engine from posthog.clickhouse.table_engines import CollapsingMergeTree, ReplacingMergeTree -from posthog.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA_PERSON_UNIQUE_ID +from posthog.kafka_client.topics import ( + KAFKA_PERSON, + KAFKA_PERSON_DISTINCT_ID, + KAFKA_PERSON_UNIQUE_ID, +) from posthog.settings import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE TRUNCATE_PERSON_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS person ON CLUSTER '{CLICKHOUSE_CLUSTER}'" @@ -48,7 +52,10 @@ ) KAFKA_PERSONS_TABLE_SQL = lambda: PERSONS_TABLE_BASE_SQL.format( - table_name="kafka_" + PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, engine=kafka_engine(KAFKA_PERSON), extra_fields="" + table_name="kafka_" + PERSONS_TABLE, + cluster=CLICKHOUSE_CLUSTER, + engine=kafka_engine(KAFKA_PERSON), + extra_fields="", ) # You must include the database here because of a bug in clickhouse @@ -154,7 +161,9 @@ _offset FROM {database}.kafka_{table_name} """.format( - table_name=PERSONS_DISTINCT_ID_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE + table_name=PERSONS_DISTINCT_ID_TABLE, + cluster=CLICKHOUSE_CLUSTER, + database=CLICKHOUSE_DATABASE, ) # @@ -216,7 +225,9 @@ _partition FROM {database}.kafka_{table_name} """.format( - table_name=PERSON_DISTINCT_ID2_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE + table_name=PERSON_DISTINCT_ID2_TABLE, + cluster=CLICKHOUSE_CLUSTER, + database=CLICKHOUSE_DATABASE, ) # diff --git 
a/posthog/models/person/util.py b/posthog/models/person/util.py index 9af13bc6e9d05..7e8afc3db5e78 100644 --- a/posthog/models/person/util.py +++ b/posthog/models/person/util.py @@ -13,7 +13,11 @@ from posthog.client import sync_execute from posthog.kafka_client.client import ClickhouseProducer -from posthog.kafka_client.topics import KAFKA_PERSON, KAFKA_PERSON_DISTINCT_ID, KAFKA_PERSON_OVERRIDES +from posthog.kafka_client.topics import ( + KAFKA_PERSON, + KAFKA_PERSON_DISTINCT_ID, + KAFKA_PERSON_OVERRIDES, +) from posthog.models.person import Person, PersonDistinctId from posthog.models.person.sql import ( BULK_INSERT_PERSON_DISTINCT_ID2, @@ -53,12 +57,22 @@ def person_distinct_id_created(sender, instance: PersonDistinctId, created, **kw @receiver(post_delete, sender=Person) def person_deleted(sender, instance: Person, **kwargs): - _delete_person(instance.team.id, instance.uuid, int(instance.version or 0), instance.created_at, sync=True) + _delete_person( + instance.team.id, + instance.uuid, + int(instance.version or 0), + instance.created_at, + sync=True, + ) @receiver(post_delete, sender=PersonDistinctId) def person_distinct_id_deleted(sender, instance: PersonDistinctId, **kwargs): _delete_ch_distinct_id( - instance.team.pk, instance.person.uuid, instance.distinct_id, instance.version or 0, sync=True + instance.team.pk, + instance.person.uuid, + instance.distinct_id, + instance.version or 0, + sync=True, ) try: @@ -83,7 +97,11 @@ def bulk_create_persons(persons_list: List[Dict]): for index, person in enumerate(inserted): for distinct_id in persons_list[index]["distinct_ids"]: distinct_ids.append( - PersonDistinctId(person_id=person.pk, distinct_id=distinct_id, team_id=person.team_id) + PersonDistinctId( + person_id=person.pk, + distinct_id=distinct_id, + team_id=person.team_id, + ) ) distinct_id_inserts.append(f"('{distinct_id}', '{person.uuid}', {person.team_id}, 0, 0, now(), 0, 0)") person_mapping[distinct_id] = person @@ -96,7 +114,10 @@ def 
bulk_create_persons(persons_list: List[Dict]): PersonDistinctId.objects.bulk_create(distinct_ids) sync_execute(INSERT_PERSON_BULK_SQL + ", ".join(person_inserts), flush=False) - sync_execute(BULK_INSERT_PERSON_DISTINCT_ID2 + ", ".join(distinct_id_inserts), flush=False) + sync_execute( + BULK_INSERT_PERSON_DISTINCT_ID2 + ", ".join(distinct_id_inserts), + flush=False, + ) return person_mapping @@ -147,7 +168,12 @@ def create_person( def create_person_distinct_id( - team_id: int, distinct_id: str, person_id: str, version=0, is_deleted: bool = False, sync: bool = False + team_id: int, + distinct_id: str, + person_id: str, + version=0, + is_deleted: bool = False, + sync: bool = False, ) -> None: p = ClickhouseProducer() p.produce( @@ -191,7 +217,9 @@ def create_person_override( def get_persons_by_distinct_ids(team_id: int, distinct_ids: List[str]) -> QuerySet: return Person.objects.filter( - team_id=team_id, persondistinctid__team_id=team_id, persondistinctid__distinct_id__in=distinct_ids + team_id=team_id, + persondistinctid__team_id=team_id, + persondistinctid__distinct_id__in=distinct_ids, ) @@ -208,7 +236,11 @@ def delete_person(person: Person, sync: bool = False) -> None: def _delete_person( - team_id: int, uuid: UUID, version: int, created_at: Optional[datetime.datetime] = None, sync: bool = False + team_id: int, + uuid: UUID, + version: int, + created_at: Optional[datetime.datetime] = None, + sync: bool = False, ) -> None: create_person( uuid=str(uuid), diff --git a/posthog/models/person_overrides/sql.py b/posthog/models/person_overrides/sql.py index 853988495f639..c518db6de0e11 100644 --- a/posthog/models/person_overrides/sql.py +++ b/posthog/models/person_overrides/sql.py @@ -14,7 +14,11 @@ from django.conf import settings from posthog.kafka_client.topics import KAFKA_PERSON_OVERRIDE -from posthog.settings.data_stores import CLICKHOUSE_CLUSTER, CLICKHOUSE_DATABASE, KAFKA_HOSTS +from posthog.settings.data_stores import ( + CLICKHOUSE_CLUSTER, + 
CLICKHOUSE_DATABASE, + KAFKA_HOSTS, +) PERSON_OVERRIDES_CREATE_TABLE_SQL = f""" CREATE TABLE IF NOT EXISTS `{CLICKHOUSE_DATABASE}`.`person_overrides` diff --git a/posthog/models/personal_api_key.py b/posthog/models/personal_api_key.py index 7d42679a627a4..8692654e3861a 100644 --- a/posthog/models/personal_api_key.py +++ b/posthog/models/personal_api_key.py @@ -29,5 +29,9 @@ class PersonalAPIKey(models.Model): # DEPRECATED: personal API keys are now specifically personal, without team affiliation team = models.ForeignKey( - "posthog.Team", on_delete=models.SET_NULL, related_name="personal_api_keys+", null=True, blank=True + "posthog.Team", + on_delete=models.SET_NULL, + related_name="personal_api_keys+", + null=True, + blank=True, ) diff --git a/posthog/models/plugin.py b/posthog/models/plugin.py index 0f055f9d68c49..b8787dd3df344 100644 --- a/posthog/models/plugin.py +++ b/posthog/models/plugin.py @@ -135,12 +135,24 @@ def install(self, **kwargs) -> "Plugin": class Plugin(models.Model): class PluginType(models.TextChoices): LOCAL = "local", "local" # url starts with "file:" - CUSTOM = "custom", "custom" # github or npm url downloaded as zip or tar.gz into field "archive" - REPOSITORY = "repository", "repository" # same, but originating from our plugins.json repository - SOURCE = "source", "source" # coded inside the browser (versioned via plugin_source_version) + CUSTOM = ( + "custom", + "custom", + ) # github or npm url downloaded as zip or tar.gz into field "archive" + REPOSITORY = ( + "repository", + "repository", + ) # same, but originating from our plugins.json repository + SOURCE = ( + "source", + "source", + ) # coded inside the browser (versioned via plugin_source_version) organization: models.ForeignKey = models.ForeignKey( - "posthog.Organization", on_delete=models.CASCADE, related_name="plugins", related_query_name="plugin" + "posthog.Organization", + on_delete=models.CASCADE, + related_name="plugins", + related_query_name="plugin", ) plugin_type: 
models.CharField = models.CharField( max_length=200, null=True, blank=True, choices=PluginType.choices, default=None @@ -240,7 +252,10 @@ class PluginAttachment(models.Model): class PluginStorage(models.Model): class Meta: constraints = [ - models.UniqueConstraint(fields=["plugin_config_id", "key"], name="posthog_unique_plugin_storage_key") + models.UniqueConstraint( + fields=["plugin_config_id", "key"], + name="posthog_unique_plugin_storage_key", + ) ] plugin_config: models.ForeignKey = models.ForeignKey("PluginConfig", on_delete=models.CASCADE) @@ -266,7 +281,10 @@ class PluginSourceFileManager(models.Manager): def sync_from_plugin_archive( self, plugin: Plugin, plugin_json_parsed: Optional[Dict[str, Any]] = None ) -> Tuple[ - "PluginSourceFile", Optional["PluginSourceFile"], Optional["PluginSourceFile"], Optional["PluginSourceFile"] + "PluginSourceFile", + Optional["PluginSourceFile"], + Optional["PluginSourceFile"], + Optional["PluginSourceFile"], ]: """Create PluginSourceFile objects from a plugin that has an archive. 
@@ -281,7 +299,12 @@ def sync_from_plugin_archive( plugin_json_instance, _ = PluginSourceFile.objects.update_or_create( plugin=plugin, filename="plugin.json", - defaults={"source": plugin_json, "transpiled": None, "status": None, "error": None}, + defaults={ + "source": plugin_json, + "transpiled": None, + "status": None, + "error": None, + }, ) # Save frontend.tsx frontend_tsx_instance: Optional["PluginSourceFile"] = None @@ -289,7 +312,12 @@ def sync_from_plugin_archive( frontend_tsx_instance, _ = PluginSourceFile.objects.update_or_create( plugin=plugin, filename="frontend.tsx", - defaults={"source": frontend_tsx, "transpiled": None, "status": None, "error": None}, + defaults={ + "source": frontend_tsx, + "transpiled": None, + "status": None, + "error": None, + }, ) else: filenames_to_delete.append("frontend.tsx") @@ -299,7 +327,12 @@ def sync_from_plugin_archive( site_ts_instance, _ = PluginSourceFile.objects.update_or_create( plugin=plugin, filename="site.ts", - defaults={"source": site_ts, "transpiled": None, "status": None, "error": None}, + defaults={ + "source": site_ts, + "transpiled": None, + "status": None, + "error": None, + }, ) else: filenames_to_delete.append("site.ts") @@ -311,7 +344,12 @@ def sync_from_plugin_archive( index_ts_instance, _ = PluginSourceFile.objects.update_or_create( plugin=plugin, filename="index.ts", - defaults={"source": index_ts, "transpiled": None, "status": None, "error": None}, + defaults={ + "source": index_ts, + "transpiled": None, + "status": None, + "error": None, + }, ) else: filenames_to_delete.append("index.ts") @@ -319,7 +357,12 @@ def sync_from_plugin_archive( PluginSourceFile.objects.filter(plugin=plugin, filename__in=filenames_to_delete).delete() # Trigger plugin server reload and code transpilation plugin.save() - return plugin_json_instance, index_ts_instance, frontend_tsx_instance, site_ts_instance + return ( + plugin_json_instance, + index_ts_instance, + frontend_tsx_instance, + site_ts_instance, + ) class 
PluginSourceFile(UUIDModel): @@ -431,7 +474,8 @@ def preinstall_plugins_for_new_organization(sender, instance: Organization, crea ) except Exception as e: print( - f"⚠️ Cannot preinstall plugin from {plugin_url}, skipping it for organization {instance.name}:\n", e + f"⚠️ Cannot preinstall plugin from {plugin_url}, skipping it for organization {instance.name}:\n", + e, ) @@ -439,7 +483,6 @@ def preinstall_plugins_for_new_organization(sender, instance: Organization, crea def enable_preinstalled_plugins_for_new_team(sender, instance: Team, created: bool, **kwargs): if created and can_configure_plugins(instance.organization): for order, preinstalled_plugin in enumerate(Plugin.objects.filter(is_preinstalled=True)): - PluginConfig.objects.create( team=instance, plugin=preinstalled_plugin, diff --git a/posthog/models/prompt/prompt.py b/posthog/models/prompt/prompt.py index 74d55a0f43354..2d975a54b3e1a 100644 --- a/posthog/models/prompt/prompt.py +++ b/posthog/models/prompt/prompt.py @@ -4,7 +4,6 @@ class Prompt(models.Model): - step: models.IntegerField = models.IntegerField() type: models.CharField = models.CharField(max_length=200) # tooltip, modal, etc title: models.CharField = models.CharField(max_length=200) diff --git a/posthog/models/property/property.py b/posthog/models/property/property.py index ff57e46b77e21..3b2b2decbc574 100644 --- a/posthog/models/property/property.py +++ b/posthog/models/property/property.py @@ -78,7 +78,12 @@ class BehavioralPropertyType(str, Enum): PropertyIdentifier = Tuple[PropertyName, PropertyType, Optional[GroupTypeIndex]] NEGATED_OPERATORS = ["is_not", "not_icontains", "not_regex", "is_not_set"] -CLICKHOUSE_ONLY_PROPERTY_TYPES = ["static-cohort", "precalculated-cohort", "behavioral", "recording"] +CLICKHOUSE_ONLY_PROPERTY_TYPES = [ + "static-cohort", + "precalculated-cohort", + "behavioral", + "recording", +] VALIDATE_PROP_TYPES = { "event": ["key", "value"], @@ -95,7 +100,13 @@ class BehavioralPropertyType(str, Enum): } 
VALIDATE_BEHAVIORAL_PROP_TYPES = { - BehavioralPropertyType.PERFORMED_EVENT: ["key", "value", "event_type", "time_value", "time_interval"], + BehavioralPropertyType.PERFORMED_EVENT: [ + "key", + "value", + "event_type", + "time_value", + "time_interval", + ], BehavioralPropertyType.PERFORMED_EVENT_MULTIPLE: [ "key", "value", @@ -104,7 +115,13 @@ class BehavioralPropertyType(str, Enum): "time_interval", "operator_value", ], - BehavioralPropertyType.PERFORMED_EVENT_FIRST_TIME: ["key", "value", "event_type", "time_value", "time_interval"], + BehavioralPropertyType.PERFORMED_EVENT_FIRST_TIME: [ + "key", + "value", + "event_type", + "time_value", + "time_interval", + ], BehavioralPropertyType.PERFORMED_EVENT_SEQUENCE: [ "key", "value", @@ -282,7 +299,11 @@ class PropertyGroup: type: PropertyOperatorType values: Union[List[Property], List["PropertyGroup"]] - def __init__(self, type: PropertyOperatorType, values: Union[List[Property], List["PropertyGroup"]]) -> None: + def __init__( + self, + type: PropertyOperatorType, + values: Union[List[Property], List["PropertyGroup"]], + ) -> None: self.type = type self.values = values @@ -310,7 +331,10 @@ def to_dict(self): if not self.values: return {} - return {"type": self.type.value, "values": [prop.to_dict() for prop in self.values]} + return { + "type": self.type.value, + "values": [prop.to_dict() for prop in self.values], + } def __repr__(self): params_repr = ", ".join(f"{repr(prop)}" for prop in self.values) diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py index 18368ac082f5d..b353eb11bb141 100644 --- a/posthog/models/property/util.py +++ b/posthog/models/property/util.py @@ -17,7 +17,10 @@ from posthog.clickhouse.client.escape import escape_param_for_clickhouse from posthog.clickhouse.kafka_engine import trim_quotes_expr -from posthog.clickhouse.materialized_columns import TableWithProperties, get_materialized_columns +from posthog.clickhouse.materialized_columns import ( + 
TableWithProperties, + get_materialized_columns, +) from posthog.constants import PropertyOperatorType from posthog.hogql import ast from posthog.hogql.hogql import HogQLContext @@ -36,7 +39,10 @@ ) from posthog.models.event import Selector from posthog.models.group.sql import GET_GROUP_IDS_BY_PROPERTY_SQL -from posthog.models.person.sql import GET_DISTINCT_IDS_BY_PERSON_ID_FILTER, GET_DISTINCT_IDS_BY_PROPERTY_SQL +from posthog.models.person.sql import ( + GET_DISTINCT_IDS_BY_PERSON_ID_FILTER, + GET_DISTINCT_IDS_BY_PROPERTY_SQL, +) from posthog.models.property import ( NEGATED_OPERATORS, OperatorType, @@ -177,13 +183,19 @@ def parse_prop_clauses( else: if person_properties_mode == PersonPropertiesMode.USING_SUBQUERY: person_id_query, cohort_filter_params = format_filter_query( - cohort, idx, hogql_context, custom_match_field=person_id_joined_alias + cohort, + idx, + hogql_context, + custom_match_field=person_id_joined_alias, ) params = {**params, **cohort_filter_params} final.append(f"{property_operator} {table_formatted}distinct_id IN ({person_id_query})") else: person_id_query, cohort_filter_params = format_cohort_subquery( - cohort, idx, hogql_context, custom_match_field=f"{person_id_joined_alias}" + cohort, + idx, + hogql_context, + custom_match_field=f"{person_id_joined_alias}", ) params = {**params, **cohort_filter_params} final.append(f"{property_operator} {person_id_query}") @@ -236,7 +248,8 @@ def parse_prop_clauses( final.append( " {property_operator} {table_name}distinct_id IN ({filter_query})".format( filter_query=GET_DISTINCT_IDS_BY_PROPERTY_SQL.format( - filters=filter_query, GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id) + filters=filter_query, + GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id), ), table_name=table_formatted, property_operator=property_operator, @@ -270,7 +283,10 @@ def parse_prop_clauses( params.update(filter_params) elif prop.type == "element": query, filter_params = filter_element( - 
cast(StringMatching, prop.key), prop.value, operator=prop.operator, prepend="{}_".format(prepend) + cast(StringMatching, prop.key), + prop.value, + operator=prop.operator, + prepend="{}_".format(prepend), ) if query: final.append(f"{property_operator} {query}") @@ -278,7 +294,10 @@ def parse_prop_clauses( elif ( prop.type == "group" and person_properties_mode - in [PersonPropertiesMode.DIRECT_ON_EVENTS, PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2] + in [ + PersonPropertiesMode.DIRECT_ON_EVENTS, + PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2, + ] and groups_on_events_querying_enabled() ): group_column = f"group{prop.group_type_index}_properties" @@ -308,7 +327,11 @@ def parse_prop_clauses( else: # :TRICKY: offer groups support for queries which don't support automatically joining with groups table yet (e.g. lifecycle) filter_query, filter_params = prop_filter_json_extract( - prop, idx, prepend, prop_var=f"group_properties", allow_denormalized_props=False + prop, + idx, + prepend, + prop_var=f"group_properties", + allow_denormalized_props=False, ) group_type_index_var = f"{prepend}_group_type_index_{idx}" groups_subquery = GET_GROUP_IDS_BY_PROPERTY_SQL.format( @@ -335,7 +358,8 @@ def parse_prop_clauses( else: # :TODO: (performance) Avoid subqueries whenever possible, use joins instead subquery = GET_DISTINCT_IDS_BY_PERSON_ID_FILTER.format( - filters=filter_query, GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id) + filters=filter_query, + GET_TEAM_PERSON_DISTINCT_IDS=get_team_distinct_ids_query(team_id), ) final.append(f"{property_operator} {table_formatted}distinct_id IN ({subquery})") params.update(filter_params) @@ -415,28 +439,46 @@ def prop_filter_json_extract( params: Dict[str, Any] = {} if operator == "is_not": - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): box_value(prop.value)} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): box_value(prop.value), + } 
return ( " {property_operator} NOT has(%(v{prepend}_{idx})s, {left})".format( - idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator + idx=idx, + prepend=prepend, + left=property_expr, + property_operator=property_operator, ), params, ) elif operator == "icontains": value = "%{}%".format(prop.value) - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): value} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): value, + } return ( " {property_operator} {left} ILIKE %(v{prepend}_{idx})s".format( - idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator + idx=idx, + prepend=prepend, + left=property_expr, + property_operator=property_operator, ), params, ) elif operator == "not_icontains": value = "%{}%".format(prop.value) - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): value} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): value, + } return ( " {property_operator} NOT ({left} ILIKE %(v{prepend}_{idx})s)".format( - idx=idx, prepend=prepend, left=property_expr, property_operator=property_operator + idx=idx, + prepend=prepend, + left=property_expr, + property_operator=property_operator, ), params, ) @@ -445,7 +487,10 @@ def prop_filter_json_extract( # If OR'ing, shouldn't be a problem since nothing will match this specific clause return f"{property_operator} 1 = 2", {} - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): prop.value, + } return ( " {property_operator} {regex_function}({left}, %(v{prepend}_{idx})s)".format( @@ -458,7 +503,10 @@ def prop_filter_json_extract( params, ) elif operator == "is_set": - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value} + params = { + "k{}_{}".format(prepend, 
idx): prop.key, + "v{}_{}".format(prepend, idx): prop.value, + } if is_denormalized: return ( " {property_operator} notEmpty({left})".format(left=property_expr, property_operator=property_operator), @@ -466,12 +514,18 @@ def prop_filter_json_extract( ) return ( " {property_operator} JSONHas({prop_var}, %(k{prepend}_{idx})s)".format( - idx=idx, prepend=prepend, prop_var=prop_var, property_operator=property_operator + idx=idx, + prepend=prepend, + prop_var=prop_var, + property_operator=property_operator, ), params, ) elif operator == "is_not_set": - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): prop.value, + } if is_denormalized: return ( " {property_operator} empty({left})".format(left=property_expr, property_operator=property_operator), @@ -479,7 +533,11 @@ def prop_filter_json_extract( ) return ( " {property_operator} (isNull({left}) OR NOT JSONHas({prop_var}, %(k{prepend}_{idx})s))".format( - idx=idx, prepend=prepend, prop_var=prop_var, left=property_expr, property_operator=property_operator + idx=idx, + prepend=prepend, + prop_var=prop_var, + left=property_expr, + property_operator=property_operator, ), params, ) @@ -496,7 +554,10 @@ def prop_filter_json_extract( parseDateTimeBestEffortOrNull(substring({property_expr}, 1, 10)) )) = %({prop_value_param_key})s""" - return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}) + return ( + query, + {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}, + ) elif operator == "is_date_after": # TODO introducing duplication in these branches now rather than refactor too early assert isinstance(prop.value, str) @@ -518,7 +579,10 @@ def prop_filter_json_extract( query = f"""{property_operator} {first_of_date_or_timestamp} > {adjusted_value}""" - return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}) 
+ return ( + query, + {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}, + ) elif operator == "is_date_before": # TODO introducing duplication in these branches now rather than refactor too early assert isinstance(prop.value, str) @@ -528,11 +592,17 @@ def prop_filter_json_extract( first_of_date_or_timestamp = f"coalesce({try_parse_as_date},{try_parse_as_timestamp})" query = f"""{property_operator} {first_of_date_or_timestamp} < %({prop_value_param_key})s""" - return (query, {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}) + return ( + query, + {"k{}_{}".format(prepend, idx): prop.key, prop_value_param_key: prop.value}, + ) elif operator in ["gt", "lt", "gte", "lte"]: count_operator = get_count_operator(operator) - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): prop.value} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): prop.value, + } extract_property_expr = trim_quotes_expr(f"replaceRegexpAll({property_expr}, ' ', '')") return ( f" {property_operator} toFloat64OrNull({extract_property_expr}) {count_operator} %(v{prepend}_{idx})s", @@ -547,10 +617,17 @@ def prop_filter_json_extract( } else: clause = " {property_operator} has(%(v{prepend}_{idx})s, {left})" - params = {"k{}_{}".format(prepend, idx): prop.key, "v{}_{}".format(prepend, idx): box_value(prop.value)} + params = { + "k{}_{}".format(prepend, idx): prop.key, + "v{}_{}".format(prepend, idx): box_value(prop.value), + } return ( clause.format( - left=property_expr, idx=idx, prepend=prepend, prop_var=prop_var, property_operator=property_operator + left=property_expr, + idx=idx, + prepend=prepend, + prop_var=prop_var, + property_operator=property_operator, ), params, ) @@ -664,7 +741,10 @@ def get_property_string_expr( and (property_name, materialised_table_column) in materialized_columns and ("group" not in materialised_table_column or groups_on_events_querying_enabled()) ): - return 
f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"', True + return ( + f'{table_string}"{materialized_columns[(property_name, materialised_table_column)]}"', + True, + ) return trim_quotes_expr(f"JSONExtractRaw({table_string}{column}, {var})"), False @@ -731,7 +811,10 @@ def filter_element( raise ValueError(f'Invalid element filtering key "{key}"') if combination_conditions: - return f"{'NOT ' if operator in NEGATED_OPERATORS else ''}({' OR '.join(combination_conditions)})", params + return ( + f"{'NOT ' if operator in NEGATED_OPERATORS else ''}({' OR '.join(combination_conditions)})", + params, + ) else: # If there are no values to filter by, this either matches nothing (for non-negated operators like "equals"), # or everything (for negated operators like "doesn't equal") @@ -837,7 +920,10 @@ def get_session_property_filter_statement(prop: Property, idx: int, prepend: str value = f"session_duration_value{prepend}_{idx}" operator = get_count_operator(prop.operator) - return (f"{SessionQuery.SESSION_TABLE_ALIAS}.session_duration {operator} %({value})s", {value: duration}) + return ( + f"{SessionQuery.SESSION_TABLE_ALIAS}.session_duration {operator} %({value})s", + {value: duration}, + ) else: raise exceptions.ValidationError(f"Property '{prop.key}' is not allowed in session property filters.") diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py index b295229a8cfcd..7747a17c71820 100644 --- a/posthog/models/property_definition.py +++ b/posthog/models/property_definition.py @@ -16,7 +16,10 @@ class PropertyType(models.TextChoices): class PropertyFormat(models.TextChoices): UnixTimestamp = "unix_timestamp", "Unix Timestamp in seconds" - UnixTimestampMilliseconds = "unix_timestamp_milliseconds", "Unix Timestamp in milliseconds" + UnixTimestampMilliseconds = ( + "unix_timestamp_milliseconds", + "Unix Timestamp in milliseconds", + ) ISO8601Date = "YYYY-MM-DDThh:mm:ssZ", "YYYY-MM-DDThh:mm:ssZ" 
FullDate = "YYYY-MM-DD hh:mm:ss", "YYYY-MM-DD hh:mm:ss" FullDateIncreasing = "DD-MM-YYYY hh:mm:ss", "DD-MM-YYYY hh:mm:ss" @@ -33,7 +36,10 @@ class Type(models.IntegerChoices): GROUP = 3, "group" team: models.ForeignKey = models.ForeignKey( - Team, on_delete=models.CASCADE, related_name="property_definitions", related_query_name="team" + Team, + on_delete=models.CASCADE, + related_name="property_definitions", + related_query_name="team", ) name: models.CharField = models.CharField(max_length=400) is_numerical: models.BooleanField = models.BooleanField( @@ -45,7 +51,7 @@ class Type(models.IntegerChoices): # :TRICKY: May be null for historical events type: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(default=Type.EVENT, choices=Type.choices) # Only populated for `Type.GROUP` - group_type_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=True) + group_type_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(null=True) # DEPRECATED property_type_format = models.CharField( @@ -76,15 +82,19 @@ class Meta: models.Index(fields=["team_id", "type", "is_numerical"]), ] + [ GinIndex( - name="index_property_definition_name", fields=["name"], opclasses=["gin_trgm_ops"] + name="index_property_definition_name", + fields=["name"], + opclasses=["gin_trgm_ops"], ) # To speed up DB-based fuzzy searching ] constraints = [ models.CheckConstraint( - name="property_type_is_valid", check=models.Q(property_type__in=PropertyType.values) + name="property_type_is_valid", + check=models.Q(property_type__in=PropertyType.values), ), models.CheckConstraint( - name="group_type_index_set", check=~models.Q(type=3) | models.Q(group_type_index__isnull=False) + name="group_type_index_set", + check=~models.Q(type=3) | models.Q(group_type_index__isnull=False), ), UniqueConstraintByExpression( name="posthog_propertydefinition_uniq", diff --git a/posthog/models/sharing_configuration.py b/posthog/models/sharing_configuration.py 
index 7dcdcb7e8f2b9..44cc70cbb7be4 100644 --- a/posthog/models/sharing_configuration.py +++ b/posthog/models/sharing_configuration.py @@ -26,7 +26,11 @@ class SharingConfiguration(models.Model): enabled: models.BooleanField = models.BooleanField(default=False) access_token: models.CharField = models.CharField( - max_length=400, null=True, blank=True, default=get_default_access_token, unique=True + max_length=400, + null=True, + blank=True, + default=get_default_access_token, + unique=True, ) def can_access_object(self, obj: models.Model): diff --git a/posthog/models/subscription.py b/posthog/models/subscription.py index e291f7c1b0490..3680155f7df27 100644 --- a/posthog/models/subscription.py +++ b/posthog/models/subscription.py @@ -92,7 +92,10 @@ def __init__(self, *args, **kwargs): interval: models.IntegerField = models.IntegerField(default=1) count: models.IntegerField = models.IntegerField(null=True) byweekday: ArrayField = ArrayField( - models.CharField(max_length=10, choices=SubscriptionByWeekDay.choices), null=True, blank=True, default=None + models.CharField(max_length=10, choices=SubscriptionByWeekDay.choices), + null=True, + blank=True, + default=None, ) bysetpos: models.IntegerField = models.IntegerField(null=True) start_date: models.DateTimeField = models.DateTimeField() @@ -141,7 +144,9 @@ def url(self): def resource_info(self) -> Optional[SubscriptionResourceInfo]: if self.insight: return SubscriptionResourceInfo( - "Insight", f"{self.insight.name or self.insight.derived_name}", self.insight.url + "Insight", + f"{self.insight.name or self.insight.derived_name}", + self.insight.url, ) elif self.dashboard: return SubscriptionResourceInfo("Dashboard", self.dashboard.name, self.dashboard.url) @@ -151,14 +156,25 @@ def resource_info(self) -> Optional[SubscriptionResourceInfo]: @property def summary(self): try: - human_frequency = {"daily": "day", "weekly": "week", "monthly": "month", "yearly": "year"}[self.frequency] + human_frequency = { + "daily": "day", 
+ "weekly": "week", + "monthly": "month", + "yearly": "year", + }[self.frequency] if self.interval > 1: human_frequency = f"{human_frequency}s" summary = f"sent every {str(self.interval) + ' ' if self.interval > 1 else ''}{human_frequency}" if self.byweekday and self.bysetpos: - human_bysetpos = {1: "first", 2: "second", 3: "third", 4: "fourth", -1: "last"}[self.bysetpos] + human_bysetpos = { + 1: "first", + 2: "second", + 3: "third", + 4: "fourth", + -1: "last", + }[self.bysetpos] summary += ( f" on the {human_bysetpos} {self.byweekday[0].capitalize() if len(self.byweekday) == 1 else 'day'}" ) diff --git a/posthog/models/tagged_item.py b/posthog/models/tagged_item.py index 3d6b73383aaf8..4c55c4a663791 100644 --- a/posthog/models/tagged_item.py +++ b/posthog/models/tagged_item.py @@ -6,7 +6,14 @@ from posthog.models.utils import UUIDModel -RELATED_OBJECTS = ("dashboard", "insight", "event_definition", "property_definition", "action", "feature_flag") +RELATED_OBJECTS = ( + "dashboard", + "insight", + "event_definition", + "property_definition", + "action", + "feature_flag", +) # Checks that exactly one object field is populated @@ -14,7 +21,10 @@ def build_check(related_objects: Iterable[str]): built_check_list: List[Union[Q, Q]] = [] for field in related_objects: built_check_list.append( - Q(*[(f"{other_field}__isnull", other_field != field) for other_field in related_objects], _connector="AND") + Q( + *[(f"{other_field}__isnull", other_field != field) for other_field in related_objects], + _connector="AND", + ) ) return Q(*built_check_list, _connector="OR") @@ -23,7 +33,9 @@ def build_check(related_objects: Iterable[str]): # uniqueness across null columns. 
def build_partial_uniqueness_constraint(field: str): return UniqueConstraint( - fields=["tag", field], name=f"unique_{field}_tagged_item", condition=Q((f"{field}__isnull", False)) + fields=["tag", field], + name=f"unique_{field}_tagged_item", + condition=Q((f"{field}__isnull", False)), ) @@ -47,22 +59,46 @@ class TaggedItem(UUIDModel): # When adding a new taggeditem-model relationship, make sure to add the foreign key field and append field name to # the `RELATED_OBJECTS` tuple above. dashboard: models.ForeignKey = models.ForeignKey( - "Dashboard", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "Dashboard", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) insight: models.ForeignKey = models.ForeignKey( - "Insight", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "Insight", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) event_definition: models.ForeignKey = models.ForeignKey( - "EventDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "EventDefinition", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) property_definition: models.ForeignKey = models.ForeignKey( - "PropertyDefinition", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "PropertyDefinition", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) action: models.ForeignKey = models.ForeignKey( - "Action", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "Action", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) feature_flag: models.ForeignKey = models.ForeignKey( - "FeatureFlag", on_delete=models.CASCADE, null=True, blank=True, related_name="tagged_items" + "FeatureFlag", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="tagged_items", ) 
class Meta: diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py index 4cd9ae773fdeb..bc458807b56a4 100644 --- a/posthog/models/team/team.py +++ b/posthog/models/team/team.py @@ -7,7 +7,11 @@ import pytz from django.conf import settings from django.contrib.postgres.fields import ArrayField -from django.core.validators import MinLengthValidator, MaxValueValidator, MinValueValidator +from django.core.validators import ( + MinLengthValidator, + MaxValueValidator, + MinValueValidator, +) from django.db import models from django.db.models.signals import post_delete, post_save from zoneinfo import ZoneInfo @@ -20,7 +24,11 @@ from posthog.models.filters.utils import GroupTypeIndex from posthog.models.instance_setting import get_instance_setting from posthog.models.signals import mutable_receiver -from posthog.models.utils import UUIDClassicModel, generate_random_token_project, sane_repr +from posthog.models.utils import ( + UUIDClassicModel, + generate_random_token_project, + sane_repr, +) from posthog.settings.utils import get_list from posthog.utils import GenericEmails, PersonOnEventsMode @@ -66,7 +74,12 @@ def set_test_account_filters(self, organization: Optional[Any]) -> List: example_email = re.search(r"@[\w.]+", example_emails[0]) if example_email: return [ - {"key": "email", "operator": "not_icontains", "value": example_email.group(), "type": "person"} + { + "key": "email", + "operator": "not_icontains", + "value": example_email.group(), + "type": "person", + } ] + filters return filters @@ -126,7 +139,10 @@ def clickhouse_mode(self) -> str: class Team(UUIDClassicModel): organization: models.ForeignKey = models.ForeignKey( - "posthog.Organization", on_delete=models.CASCADE, related_name="teams", related_query_name="team" + "posthog.Organization", + on_delete=models.CASCADE, + related_name="teams", + related_query_name="team", ) api_token: models.CharField = models.CharField( max_length=200, @@ -136,7 +152,9 @@ class Team(UUIDClassicModel): ) 
app_urls: ArrayField = ArrayField(models.CharField(max_length=200, null=True), default=list, blank=True) name: models.CharField = models.CharField( - max_length=200, default="Default Project", validators=[MinLengthValidator(1, "Project must have a name!")] + max_length=200, + default="Default Project", + validators=[MinLengthValidator(1, "Project must have a name!")], ) slack_incoming_webhook: models.CharField = models.CharField(max_length=500, null=True, blank=True) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) @@ -157,8 +175,10 @@ class Team(UUIDClassicModel): decimal_places=2, validators=[MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(1))], ) - session_recording_minimum_duration_milliseconds: models.IntegerField = models.IntegerField( - null=True, blank=True, validators=[MinValueValidator(0), MaxValueValidator(15000)] + session_recording_minimum_duration_milliseconds: (models.IntegerField) = models.IntegerField( + null=True, + blank=True, + validators=[MinValueValidator(0), MaxValueValidator(15000)], ) session_recording_linked_flag: models.JSONField = models.JSONField(null=True, blank=True) capture_console_log_opt_in: models.BooleanField = models.BooleanField(null=True, blank=True) @@ -185,7 +205,11 @@ class Team(UUIDClassicModel): recording_domains: ArrayField = ArrayField(models.CharField(max_length=200, null=True), blank=True, null=True) primary_dashboard: models.ForeignKey = models.ForeignKey( - "posthog.Dashboard", on_delete=models.SET_NULL, null=True, related_name="primary_dashboard_teams", blank=True + "posthog.Dashboard", + on_delete=models.SET_NULL, + null=True, + related_name="primary_dashboard_teams", + blank=True, ) # Dashboard shown on project homepage # Generic field for storing any team-specific context that is more temporary in nature and thus @@ -233,7 +257,10 @@ def person_on_events_mode(self) -> PersonOnEventsMode: if self._person_on_events_querying_enabled: # also tag person_on_events_enabled for legacy 
compatibility - tag_queries(person_on_events_enabled=True, person_on_events_mode=PersonOnEventsMode.V1_ENABLED) + tag_queries( + person_on_events_enabled=True, + person_on_events_mode=PersonOnEventsMode.V1_ENABLED, + ) return PersonOnEventsMode.V1_ENABLED return PersonOnEventsMode.DISABLED @@ -259,7 +286,10 @@ def _person_on_events_querying_enabled(self) -> bool: str(self.uuid), groups={"organization": str(self.organization_id)}, group_properties={ - "organization": {"id": str(self.organization_id), "created_at": self.organization.created_at} + "organization": { + "id": str(self.organization_id), + "created_at": self.organization.created_at, + } }, only_evaluate_locally=True, send_feature_flag_events=False, @@ -280,7 +310,10 @@ def _person_on_events_v2_querying_enabled(self) -> bool: str(self.uuid), groups={"organization": str(self.organization_id)}, group_properties={ - "organization": {"id": str(self.organization_id), "created_at": self.organization.created_at} + "organization": { + "id": str(self.organization_id), + "created_at": self.organization.created_at, + } }, only_evaluate_locally=True, send_feature_flag_events=False, diff --git a/posthog/models/team/util.py b/posthog/models/team/util.py index b2fa36b1430dd..ccaa249c559bf 100644 --- a/posthog/models/team/util.py +++ b/posthog/models/team/util.py @@ -49,6 +49,7 @@ def delete_batch_exports(team_ids: List[int]): can_enable_actor_on_events = False + # :TRICKY: Avoid overly eagerly checking whether the migration is complete. # We instead cache negative responses for a minute and a positive one forever. 
def actor_on_events_ready() -> bool: diff --git a/posthog/models/test/test_activity_logging.py b/posthog/models/test/test_activity_logging.py index ebe161d5e3986..e7f3ed4c13663 100644 --- a/posthog/models/test/test_activity_logging.py +++ b/posthog/models/test/test_activity_logging.py @@ -13,6 +13,33 @@ def test_dict_changes_between(self): self.assertEqual(len(changes), 3) - self.assertIn(Change(type="Plugin", action="changed", field="change_field", before="foo", after="bar"), changes) - self.assertIn(Change(type="Plugin", action="created", field="new_field", before=None, after="bar"), changes) - self.assertIn(Change(type="Plugin", action="deleted", field="delete_field", before="foo", after=None), changes) + self.assertIn( + Change( + type="Plugin", + action="changed", + field="change_field", + before="foo", + after="bar", + ), + changes, + ) + self.assertIn( + Change( + type="Plugin", + action="created", + field="new_field", + before=None, + after="bar", + ), + changes, + ) + self.assertIn( + Change( + type="Plugin", + action="deleted", + field="delete_field", + before="foo", + after=None, + ), + changes, + ) diff --git a/posthog/models/test/test_async_deletion_model.py b/posthog/models/test/test_async_deletion_model.py index c1f94dc825ed0..abb057fd6b9fa 100644 --- a/posthog/models/test/test_async_deletion_model.py +++ b/posthog/models/test/test_async_deletion_model.py @@ -35,7 +35,10 @@ def setUp(self): @snapshot_clickhouse_queries def test_mark_team_deletions_done(self): deletion = AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().mark_deletions_done() @@ -48,7 +51,10 @@ def test_mark_deletions_done_team_when_not_done(self): _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1") deletion = 
AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().mark_deletions_done() @@ -58,11 +64,26 @@ def test_mark_deletions_done_team_when_not_done(self): @snapshot_clickhouse_queries def test_mark_deletions_done_person(self): - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid2) - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", person_id=uuid) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + person_id=uuid2, + ) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[1], + distinct_id="1", + person_id=uuid, + ) deletion = AsyncDeletion.objects.create( - deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user + deletion_type=DeletionType.Person, + team_id=self.teams[0].pk, + key=str(uuid), + created_by=self.user, ) AsyncEventDeletion().mark_deletions_done() @@ -72,10 +93,19 @@ def test_mark_deletions_done_person(self): @snapshot_clickhouse_queries def test_mark_deletions_done_person_when_not_done(self): - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + person_id=uuid, + ) deletion = AsyncDeletion.objects.create( - deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user + deletion_type=DeletionType.Person, + team_id=self.teams[0].pk, + key=str(uuid), + created_by=self.user, ) AsyncEventDeletion().mark_deletions_done() @@ -86,13 +116,25 @@ def test_mark_deletions_done_person_when_not_done(self): @snapshot_clickhouse_queries def 
test_mark_deletions_done_groups(self): _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_1": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_1": "foo"}, ) _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "bar"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_0": "bar"}, ) _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", properties={"$group_0": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[1], + distinct_id="1", + properties={"$group_0": "foo"}, ) deletion = AsyncDeletion.objects.create( @@ -111,7 +153,11 @@ def test_mark_deletions_done_groups(self): @snapshot_clickhouse_queries def test_mark_deletions_done_groups_when_not_done(self): _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_0": "foo"}, ) deletion = AsyncDeletion.objects.create( @@ -132,7 +178,10 @@ def test_delete_teams(self): _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1") AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().run() @@ -144,7 +193,10 @@ def test_delete_teams_unrelated(self): _create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1") AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + 
key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().run() @@ -153,10 +205,19 @@ def test_delete_teams_unrelated(self): @snapshot_clickhouse_alter_queries def test_delete_person(self): - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + person_id=uuid, + ) AsyncDeletion.objects.create( - deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user + deletion_type=DeletionType.Person, + team_id=self.teams[0].pk, + key=str(uuid), + created_by=self.user, ) AsyncEventDeletion().run() @@ -165,11 +226,26 @@ def test_delete_person(self): @snapshot_clickhouse_alter_queries def test_delete_person_unrelated(self): - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", person_id=uuid2) - _create_event(event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", person_id=uuid) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + person_id=uuid2, + ) + _create_event( + event_uuid=uuid4(), + event="event1", + team=self.teams[1], + distinct_id="1", + person_id=uuid, + ) AsyncDeletion.objects.create( - deletion_type=DeletionType.Person, team_id=self.teams[0].pk, key=str(uuid), created_by=self.user + deletion_type=DeletionType.Person, + team_id=self.teams[0].pk, + key=str(uuid), + created_by=self.user, ) AsyncEventDeletion().run() @@ -179,7 +255,11 @@ def test_delete_person_unrelated(self): @snapshot_clickhouse_alter_queries def test_delete_group(self): _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_0": "foo"}, ) AsyncDeletion.objects.create( @@ -197,13 +277,25 @@ def test_delete_group(self): 
@snapshot_clickhouse_alter_queries def test_delete_group_unrelated(self): _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_1": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_1": "foo"}, ) _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[0], distinct_id="1", properties={"$group_0": "bar"} + event_uuid=uuid4(), + event="event1", + team=self.teams[0], + distinct_id="1", + properties={"$group_0": "bar"}, ) _create_event( - event_uuid=uuid4(), event="event1", team=self.teams[1], distinct_id="1", properties={"$group_0": "foo"} + event_uuid=uuid4(), + event="event1", + team=self.teams[1], + distinct_id="1", + properties={"$group_0": "foo"}, ) AsyncDeletion.objects.create( @@ -222,7 +314,12 @@ def test_delete_group_unrelated(self): def test_delete_auxilary_models_via_team(self): create_person(team_id=self.teams[0].pk, properties={"x": 0}, version=0, uuid=uuid) create_person_distinct_id(self.teams[0].pk, "0", uuid) - create_group(team_id=self.teams[0].pk, group_type_index=0, group_key="org:5", properties={}) + create_group( + team_id=self.teams[0].pk, + group_type_index=0, + group_key="org:5", + properties={}, + ) insert_static_cohort([uuid4()], 0, self.teams[0]) self._insert_cohortpeople_row(self.teams[0], uuid4(), 3) create_plugin_log_entry( @@ -236,7 +333,10 @@ def test_delete_auxilary_models_via_team(self): ) AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().run() @@ -252,7 +352,12 @@ def test_delete_auxilary_models_via_team(self): def test_delete_auxilary_models_via_team_unrelated(self): create_person(team_id=self.teams[1].pk, properties={"x": 0}, version=0, uuid=uuid) 
create_person_distinct_id(self.teams[1].pk, "0", uuid) - create_group(team_id=self.teams[1].pk, group_type_index=0, group_key="org:5", properties={}) + create_group( + team_id=self.teams[1].pk, + group_type_index=0, + group_key="org:5", + properties={}, + ) insert_static_cohort([uuid4()], 0, self.teams[1]) self._insert_cohortpeople_row(self.teams[1], uuid4(), 3) create_plugin_log_entry( @@ -266,7 +371,10 @@ def test_delete_auxilary_models_via_team_unrelated(self): ) AsyncDeletion.objects.create( - deletion_type=DeletionType.Team, team_id=self.teams[0].pk, key=str(self.teams[0].pk), created_by=self.user + deletion_type=DeletionType.Team, + team_id=self.teams[0].pk, + key=str(self.teams[0].pk), + created_by=self.user, ) AsyncEventDeletion().run() @@ -284,7 +392,10 @@ def test_delete_cohortpeople(self): self._insert_cohortpeople_row(team, uuid4(), cohort_id) AsyncDeletion.objects.create( - deletion_type=DeletionType.Cohort_full, team_id=team.pk, key=str(cohort_id) + "_0", created_by=self.user + deletion_type=DeletionType.Cohort_full, + team_id=team.pk, + key=str(cohort_id) + "_0", + created_by=self.user, ) AsyncCohortDeletion().run() @@ -298,7 +409,10 @@ def test_delete_cohortpeople_version(self): self._insert_cohortpeople_row(team, uuid4(), cohort_id, 3) AsyncDeletion.objects.create( - deletion_type=DeletionType.Cohort_stale, team_id=team.pk, key=str(cohort_id) + "_3", created_by=self.user + deletion_type=DeletionType.Cohort_stale, + team_id=team.pk, + key=str(cohort_id) + "_3", + created_by=self.user, ) AsyncCohortDeletion().run() @@ -314,5 +428,10 @@ def _insert_cohortpeople_row(self, team: Team, person_id: UUID, cohort_id: int, INSERT INTO cohortpeople (person_id, cohort_id, team_id, sign, version) VALUES (%(person_id)s, %(cohort_id)s, %(team_id)s, 1, %(version)s) """, - {"person_id": str(person_id), "cohort_id": cohort_id, "team_id": team.pk, "version": version}, + { + "person_id": str(person_id), + "cohort_id": cohort_id, + "team_id": team.pk, + "version": 
version, + }, ) diff --git a/posthog/models/test/test_dashboard_tile_model.py b/posthog/models/test/test_dashboard_tile_model.py index fe88e813c1181..be13ba06975c3 100644 --- a/posthog/models/test/test_dashboard_tile_model.py +++ b/posthog/models/test/test_dashboard_tile_model.py @@ -5,7 +5,11 @@ from django.db.utils import IntegrityError from posthog.models.dashboard import Dashboard -from posthog.models.dashboard_tile import DashboardTile, Text, get_tiles_ordered_by_position +from posthog.models.dashboard_tile import ( + DashboardTile, + Text, + get_tiles_ordered_by_position, +) from posthog.models.exported_asset import ExportedAsset from posthog.models.insight import Insight from posthog.test.base import APIBaseTest @@ -60,7 +64,6 @@ def test_cannot_add_a_tile_with_insight_and_text_on_validation(self) -> None: DashboardTile.objects.create(dashboard=self.dashboard, insight=insight, text=text) def test_cannot_set_caching_data_for_text_tiles(self) -> None: - tile_fields: List[Dict] = [ {"filters_hash": "123"}, {"refreshing": True}, diff --git a/posthog/models/test/test_entity_model.py b/posthog/models/test/test_entity_model.py index 55d0de18fc01a..c11e5bd99e9fd 100644 --- a/posthog/models/test/test_entity_model.py +++ b/posthog/models/test/test_entity_model.py @@ -1,6 +1,10 @@ from django.test import TestCase -from posthog.models.entity import TREND_FILTER_TYPE_ACTIONS, TREND_FILTER_TYPE_EVENTS, Entity +from posthog.models.entity import ( + TREND_FILTER_TYPE_ACTIONS, + TREND_FILTER_TYPE_EVENTS, + Entity, +) class TestEntity(TestCase): @@ -16,7 +20,11 @@ def test_inclusion(self): "type": TREND_FILTER_TYPE_EVENTS, "properties": [ {"key": "email", "value": "test@posthog.com", "type": "person"}, - {"key": "current_url", "value": "test@posthog.com", "type": "element"}, + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + }, ], } ) @@ -24,7 +32,13 @@ def test_inclusion(self): { "id": "e1", "type": TREND_FILTER_TYPE_EVENTS, - "properties": 
[{"key": "current_url", "value": "test@posthog.com", "type": "element"}], + "properties": [ + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + } + ], } ) @@ -38,7 +52,11 @@ def test_inclusion_unordered(self): "type": TREND_FILTER_TYPE_EVENTS, "properties": [ {"key": "browser", "value": "chrome", "type": "person"}, - {"key": "current_url", "value": "test@posthog.com", "type": "element"}, + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + }, {"key": "email", "value": "test@posthog.com", "type": "person"}, ], } @@ -47,7 +65,13 @@ def test_inclusion_unordered(self): { "id": "e1", "type": TREND_FILTER_TYPE_EVENTS, - "properties": [{"key": "current_url", "value": "test@posthog.com", "type": "element"}], + "properties": [ + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + } + ], } ) @@ -55,7 +79,6 @@ def test_inclusion_unordered(self): self.assertFalse(entity1.is_superset(entity2)) def test_equality_with_ids(self): - entity1 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_ACTIONS}) entity2 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_ACTIONS}) @@ -83,7 +106,11 @@ def test_equality_with_simple_properties(self): "type": TREND_FILTER_TYPE_EVENTS, "properties": [ {"key": "email", "value": "test@posthog.com", "type": "person"}, - {"key": "current_url", "value": "test@posthog.com", "type": "element"}, + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + }, ], } ) @@ -92,7 +119,11 @@ def test_equality_with_simple_properties(self): "id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": [ - {"key": "current_url", "value": "test@posthog.com", "type": "element"}, + { + "key": "current_url", + "value": "test@posthog.com", + "type": "element", + }, {"key": "email", "value": "test@posthog.com", "type": "person"}, ], } @@ -105,7 +136,11 @@ def test_equality_with_simple_properties(self): "id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": [ - {"key": 
"current$url", "value": "test@posthog.com", "type": "element"}, + { + "key": "current$url", + "value": "test@posthog.com", + "type": "element", + }, {"key": "email", "value": "test@posthog.com", "type": "person"}, ], } @@ -120,8 +155,18 @@ def test_equality_with_complex_operator_properties(self): "type": TREND_FILTER_TYPE_EVENTS, "properties": [ {"key": "count", "operator": "lt", "value": 12, "type": "element"}, - {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"}, - {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"}, + { + "key": "email", + "operator": "in", + "value": ["a, b"], + "type": "person", + }, + { + "key": "selector", + "value": [".btn"], + "operator": "exact", + "type": "element", + }, {"key": "test_prop", "value": 1.2, "operator": "gt"}, ], } @@ -133,8 +178,18 @@ def test_equality_with_complex_operator_properties(self): "properties": [ {"key": "test_prop", "value": 1.20, "operator": "gt"}, {"key": "count", "operator": "lt", "value": 12, "type": "element"}, - {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"}, - {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"}, + { + "key": "selector", + "value": [".btn"], + "operator": "exact", + "type": "element", + }, + { + "key": "email", + "operator": "in", + "value": ["a, b"], + "type": "person", + }, ], } ) @@ -149,8 +204,18 @@ def test_equality_with_complex_operator_properties(self): "properties": [ {"key": "test_prop", "value": 1.200, "operator": "gt"}, {"key": "count", "operator": "lt", "value": 12, "type": "element"}, - {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"}, - {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"}, + { + "key": "selector", + "value": [".btn"], + "operator": "exact", + "type": "element", + }, + { + "key": "email", + "operator": "in", + "value": ["a, b"], + "type": "person", + }, ], } ) @@ -164,8 +229,18 @@ def 
test_equality_with_complex_operator_properties(self): "properties": [ {"key": "test_prop", "value": 1.2001, "operator": "gt"}, {"key": "count", "operator": "lt", "value": 12, "type": "element"}, - {"key": "selector", "value": [".btn"], "operator": "exact", "type": "element"}, - {"key": "email", "operator": "in", "value": ["a, b"], "type": "person"}, + { + "key": "selector", + "value": [".btn"], + "operator": "exact", + "type": "element", + }, + { + "key": "email", + "operator": "in", + "value": ["a, b"], + "type": "person", + }, ], } ) @@ -173,9 +248,19 @@ def test_equality_with_complex_operator_properties(self): self.assertFalse(entity1.equals(entity2)) def test_equality_with_old_style_and_new_style_properties(self): - entity1 = Entity({"id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": {"key": "value"}}) + entity1 = Entity( + { + "id": "e1", + "type": TREND_FILTER_TYPE_EVENTS, + "properties": {"key": "value"}, + } + ) entity2 = Entity( - {"id": "e1", "type": TREND_FILTER_TYPE_EVENTS, "properties": [{"key": "key", "value": "value"}]} + { + "id": "e1", + "type": TREND_FILTER_TYPE_EVENTS, + "properties": [{"key": "key", "value": "value"}], + } ) self.assertTrue(entity1.equals(entity2)) diff --git a/posthog/models/test/test_event_model.py b/posthog/models/test/test_event_model.py index d5343e9141949..8c0f2ab8994b3 100644 --- a/posthog/models/test/test_event_model.py +++ b/posthog/models/test/test_event_model.py @@ -20,7 +20,10 @@ def test_filter_with_selector_direct_decendant_ordering(self): self.team, [ {"event": "$autocapture", "selector": "div > div > a"}, - {"event": "$autocapture", "selector": "div > a.somethingthatdoesntexist"}, + { + "event": "$autocapture", + "selector": "div > a.somethingthatdoesntexist", + }, ], ) @@ -28,7 +31,10 @@ def test_filter_with_selector_direct_decendant_ordering(self): def test_filter_with_selector_nth_child(self): all_events = self._setup_action_selector_events() - action = _create_action(self.team, [{"event": 
"$autocapture", "selector": "div > a:nth-child(2)"}]) + action = _create_action( + self.team, + [{"event": "$autocapture", "selector": "div > a:nth-child(2)"}], + ) self.assertActionEventsMatch(action, [all_events[1]]) @@ -58,7 +64,13 @@ def _setup_action_selector_events(self): team=self.team, distinct_id="whatever", elements=[ - Element(tag_name="a", href="/a-url", nth_child=1, nth_of_type=0, attr_class=["one-class"]), + Element( + tag_name="a", + href="/a-url", + nth_child=1, + nth_of_type=0, + attr_class=["one-class"], + ), Element(tag_name="button", nth_child=0, nth_of_type=0), Element(tag_name="div", nth_child=0, nth_of_type=0), Element(tag_name="div", nth_child=0, nth_of_type=0, attr_id="nested"), @@ -126,14 +138,30 @@ def test_with_normal_filters(self): team=self.team, event="$autocapture", distinct_id="whatever", - elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="a", + href="/a-url", + text="some_text", + nth_child=0, + nth_of_type=0, + ) + ], ) event2_uuid = _create_event( team=self.team, event="$autocapture", distinct_id="whatever2", - elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="a", + href="/a-url", + text="some_text", + nth_child=0, + nth_of_type=0, + ) + ], ) event3_uuid = _create_event( @@ -141,9 +169,20 @@ def test_with_normal_filters(self): event="$autocapture", distinct_id="whatever", elements=[ - Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="a", + href="/a-url-2", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), # make sure elements don't get double counted if they're part of the same event - Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="div", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), ], ) @@ -152,9 +191,20 @@ def 
test_with_normal_filters(self): event="$autocapture", distinct_id="whatever2", elements=[ - Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="a", + href="/a-url-2", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), # make sure elements don't get double counted if they're part of the same event - Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="div", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), ], ) @@ -163,13 +213,29 @@ def test_with_normal_filters(self): team=team2, event="$autocapture", distinct_id="whatever2", - elements=[Element(tag_name="a", href="/a-url", text="some_other_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="a", + href="/a-url", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ) + ], ) _create_event( team=team2, event="$autocapture", distinct_id="whatever2", - elements=[Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="a", + href="/a-url-2", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ) + ], ) events = _get_events_for_action(action1) @@ -184,14 +250,26 @@ def test_with_href_contains(self): action1 = Action.objects.create(team=self.team) ActionStep.objects.create( - event="$autocapture", action=action1, href="/a-url", href_matching="contains", selector="a" + event="$autocapture", + action=action1, + href="/a-url", + href_matching="contains", + selector="a", ) event1_uuid = _create_event( team=self.team, event="$autocapture", distinct_id="whatever", - elements=[Element(tag_name="a", href="/a-url", text="some_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="a", + href="/a-url", + text="some_text", + nth_child=0, + nth_of_type=0, + ) + ], ) event2_uuid = _create_event( @@ -199,7 +277,13 @@ def test_with_href_contains(self): event="$autocapture", 
distinct_id="whatever2", elements=[ - Element(tag_name="a", href="https://google.com/a-url", text="some_text", nth_child=0, nth_of_type=0) + Element( + tag_name="a", + href="https://google.com/a-url", + text="some_text", + nth_child=0, + nth_of_type=0, + ) ], ) @@ -208,9 +292,20 @@ def test_with_href_contains(self): event="$autocapture", distinct_id="whatever", elements=[ - Element(tag_name="a", href="/a-url-2", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="a", + href="/a-url-2", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), # make sure elements don't get double counted if they're part of the same event - Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="div", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), ], ) @@ -219,9 +314,20 @@ def test_with_href_contains(self): event="$autocapture", distinct_id="whatever2", elements=[ - Element(tag_name="a", href="/b-url", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="a", + href="/b-url", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), # make sure elements don't get double counted if they're part of the same event - Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0), + Element( + tag_name="div", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ), ], ) @@ -234,7 +340,12 @@ def test_with_href_contains(self): def test_with_class(self): _create_person(distinct_ids=["whatever"], team=self.team) action1 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$autocapture", action=action1, selector="a.nav-link.active", tag_name="a") + ActionStep.objects.create( + event="$autocapture", + action=action1, + selector="a.nav-link.active", + tag_name="a", + ) event1_uuid = _create_event( event="$autocapture", team=self.team, @@ -250,7 +361,10 @@ def test_with_class(self): event="$autocapture", team=self.team, 
distinct_id="whatever", - elements=[Element(tag_name="span", attr_class=None), Element(tag_name="a", attr_class=None)], + elements=[ + Element(tag_name="span", attr_class=None), + Element(tag_name="a", attr_class=None), + ], ) events = _get_events_for_action(action1) @@ -260,7 +374,12 @@ def test_with_class(self): def test_with_class_with_escaped_symbols(self): _create_person(distinct_ids=["whatever"], team=self.team) action1 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$autocapture", action=action1, selector="a.na\\v-link:b@ld", tag_name="a") + ActionStep.objects.create( + event="$autocapture", + action=action1, + selector="a.na\\v-link:b@ld", + tag_name="a", + ) event1_uuid = _create_event( event="$autocapture", team=self.team, @@ -279,7 +398,10 @@ def test_with_class_with_escaped_slashes(self): _create_person(distinct_ids=["whatever"], team=self.team) action1 = Action.objects.create(team=self.team) ActionStep.objects.create( - event="$autocapture", action=action1, selector="a.na\\\\\\v-link:b@ld", tag_name="a" + event="$autocapture", + action=action1, + selector="a.na\\\\\\v-link:b@ld", + tag_name="a", ) event1_uuid = _create_event( event="$autocapture", @@ -323,15 +445,28 @@ def test_filter_events_by_url(self): ActionStep.objects.create(event="$autocapture", action=action1, href="/a-url-2") action2 = Action.objects.create(team=self.team) - ActionStep.objects.create(event="$autocapture", action=action2, url="123", url_matching=ActionStep.CONTAINS) + ActionStep.objects.create( + event="$autocapture", + action=action2, + url="123", + url_matching=ActionStep.CONTAINS, + ) action3 = Action.objects.create(team=self.team) ActionStep.objects.create( - event="$autocapture", action=action3, url="https://posthog.com/%/123", url_matching=ActionStep.CONTAINS + event="$autocapture", + action=action3, + url="https://posthog.com/%/123", + url_matching=ActionStep.CONTAINS, ) action4 = Action.objects.create(team=self.team) - 
ActionStep.objects.create(event="$autocapture", action=action4, url="/123$", url_matching=ActionStep.REGEX) + ActionStep.objects.create( + event="$autocapture", + action=action4, + url="/123$", + url_matching=ActionStep.REGEX, + ) _create_event(team=self.team, distinct_id="whatever", event="$autocapture") event2_uuid = _create_event( @@ -339,7 +474,14 @@ def test_filter_events_by_url(self): team=self.team, distinct_id="whatever", properties={"$current_url": "https://posthog.com/feedback/123"}, - elements=[Element(tag_name="div", text="some_other_text", nth_child=0, nth_of_type=0)], + elements=[ + Element( + tag_name="div", + text="some_other_text", + nth_child=0, + nth_of_type=0, + ) + ], ) events = _get_events_for_action(action1) @@ -360,7 +502,12 @@ def test_filter_events_by_url(self): def test_person_with_different_distinct_id(self): action_watch_movie = Action.objects.create(team=self.team, name="watched movie") - ActionStep.objects.create(action=action_watch_movie, tag_name="a", href="/movie", event="$autocapture") + ActionStep.objects.create( + action=action_watch_movie, + tag_name="a", + href="/movie", + event="$autocapture", + ) _create_person(distinct_ids=["anonymous_user", "is_now_signed_up"], team=self.team) _create_event( @@ -396,13 +543,19 @@ def test_no_person_leakage_from_other_teams(self): self.assertEqual(events[0].distinct_id, "anonymous_user") def test_person_property(self): - _create_person(team=self.team, distinct_ids=["person1"], properties={"$browser": "Chrome"}) + _create_person( + team=self.team, + distinct_ids=["person1"], + properties={"$browser": "Chrome"}, + ) _create_person(team=self.team, distinct_ids=["person2"]) _create_event(event="$pageview", distinct_id="person1", team=self.team) _create_event(event="$pageview", distinct_id="person2", team=self.team) action = Action.objects.create(name="pageview", team=self.team) ActionStep.objects.create( - action=action, event="$pageview", properties=[{"key": "$browser", "value": "Chrome", 
"type": "person"}] + action=action, + event="$pageview", + properties=[{"key": "$browser", "value": "Chrome", "type": "person"}], ) events = _get_events_for_action(action) self.assertEqual(len(events), 1) @@ -482,7 +635,10 @@ def test_selector_attribute(self): self.assertEqual(selector1.parts[0].direct_descendant, False) self.assertEqual(selector1.parts[0].unique_order, 0) - self.assertEqual(selector1.parts[1].data, {"tag_name": "div", "attributes__attr__data-id": "5"}) + self.assertEqual( + selector1.parts[1].data, + {"tag_name": "div", "attributes__attr__data-id": "5"}, + ) self.assertEqual(selector1.parts[1].direct_descendant, True) self.assertEqual(selector1.parts[1].unique_order, 0) @@ -518,7 +674,10 @@ def test_class(self): self.assertEqual(selector1.parts[0].direct_descendant, False) self.assertEqual(selector1.parts[0].unique_order, 0) - self.assertEqual(selector1.parts[1].data, {"tag_name": "div", "attr_class__contains": ["classone", "classtwo"]}) + self.assertEqual( + selector1.parts[1].data, + {"tag_name": "div", "attr_class__contains": ["classone", "classtwo"]}, + ) self.assertEqual(selector1.parts[1].direct_descendant, True) self.assertEqual(selector1.parts[1].unique_order, 0) diff --git a/posthog/models/test/test_exported_asset_model.py b/posthog/models/test/test_exported_asset_model.py index 40337c6c50635..f17808caadd54 100644 --- a/posthog/models/test/test_exported_asset_model.py +++ b/posthog/models/test/test_exported_asset_model.py @@ -68,7 +68,10 @@ def test_delete_expired_assets(self) -> None: ExportedAsset.delete_expired_assets() - assert list(ExportedAsset.objects.all()) == [asset_that_is_not_expired, asset_that_has_no_expiry] + assert list(ExportedAsset.objects.all()) == [ + asset_that_is_not_expired, + asset_that_has_no_expiry, + ] assert list(ExportedAsset.objects_including_ttl_deleted.all()) == [ asset_that_is_not_expired, asset_that_has_no_expiry, diff --git a/posthog/models/test/test_insight_caching_state.py 
b/posthog/models/test/test_insight_caching_state.py index 65b4086f9443f..2727d67f582f6 100644 --- a/posthog/models/test/test_insight_caching_state.py +++ b/posthog/models/test/test_insight_caching_state.py @@ -2,7 +2,13 @@ from django.utils.timezone import now -from posthog.models import Dashboard, DashboardTile, Insight, InsightCachingState, SharingConfiguration +from posthog.models import ( + Dashboard, + DashboardTile, + Insight, + InsightCachingState, + SharingConfiguration, +) from posthog.models.signals import mute_selected_signals from posthog.test.base import BaseTest diff --git a/posthog/models/test/test_insight_model.py b/posthog/models/test/test_insight_model.py index 08d82d0a416ac..2519b8a79cb0a 100644 --- a/posthog/models/test/test_insight_model.py +++ b/posthog/models/test/test_insight_model.py @@ -102,7 +102,8 @@ def test_dashboard_with_date_from_changes_filters_hash(self) -> None: def test_query_hash_matches_same_query_source(self) -> None: insight_with_query_at_top_level = Insight.objects.create(team=self.team, query={"kind": "EventsQuery"}) insight_with_query_in_source = Insight.objects.create( - team=self.team, query={"kind": "DataTable", "source": {"kind": "EventsQuery"}} + team=self.team, + query={"kind": "DataTable", "source": {"kind": "EventsQuery"}}, ) filters_hash_one = generate_insight_cache_key(insight_with_query_at_top_level, None) @@ -141,25 +142,37 @@ def test_dashboard_with_query_insight_and_filters(self) -> None: # test that query filters are equal when there are no dashboard filters {"dateRange": {"date_from": "-14d", "date_to": "-7d"}}, {}, - {"dateRange": {"date_from": "-14d", "date_to": "-7d"}, "properties": None}, + { + "dateRange": {"date_from": "-14d", "date_to": "-7d"}, + "properties": None, + }, ), ( # test that dashboard filters are used when there are no query filters {}, {"date_from": "-14d", "date_to": "-7d"}, - {"dateRange": {"date_from": "-14d", "date_to": "-7d"}, "properties": None}, + { + "dateRange": {"date_from": 
"-14d", "date_to": "-7d"}, + "properties": None, + }, ), ( # test that dashboard filters take priority {"dateRange": {"date_from": "-2d", "date_to": "-1d"}}, {"date_from": "-4d", "date_to": "-3d"}, - {"dateRange": {"date_from": "-4d", "date_to": "-3d"}, "properties": None}, + { + "dateRange": {"date_from": "-4d", "date_to": "-3d"}, + "properties": None, + }, ), ( # test that dashboard filters take priority, even if only one value is set, the other is set to None {"dateRange": {"date_from": "-14d", "date_to": "-7d"}}, {"date_from": "all"}, - {"dateRange": {"date_from": "all", "date_to": None}, "properties": None}, + { + "dateRange": {"date_from": "all", "date_to": None}, + "properties": None, + }, ), ( # test that if no filters are set then none are outputted @@ -171,13 +184,19 @@ def test_dashboard_with_query_insight_and_filters(self) -> None: # test that properties from the query are used when there are no dashboard properties {"properties": [browser_equals_firefox]}, {}, - {"dateRange": {"date_from": None, "date_to": None}, "properties": [browser_equals_firefox]}, + { + "dateRange": {"date_from": None, "date_to": None}, + "properties": [browser_equals_firefox], + }, ), ( # test that properties from the dashboard are used when there are no query properties {}, {"properties": [browser_equals_chrome]}, - {"dateRange": {"date_from": None, "date_to": None}, "properties": [browser_equals_chrome]}, + { + "dateRange": {"date_from": None, "date_to": None}, + "properties": [browser_equals_chrome], + }, ), ( # test that properties are merged when set in both query and dashboard diff --git a/posthog/models/test/test_organization_model.py b/posthog/models/test/test_organization_model.py index 0f9c29904e4ab..f140dcc862f26 100644 --- a/posthog/models/test/test_organization_model.py +++ b/posthog/models/test/test_organization_model.py @@ -29,16 +29,19 @@ def test_plugins_are_preinstalled_on_self_hosted(self, mock_get): with self.is_cloud(False): with 
self.settings(PLUGINS_PREINSTALLED_URLS=["https://github.com/PostHog/helloworldplugin/"]): new_org, _, _ = Organization.objects.bootstrap( - self.user, plugins_access_level=Organization.PluginsAccessLevel.INSTALL + self.user, + plugins_access_level=Organization.PluginsAccessLevel.INSTALL, ) self.assertEqual(Plugin.objects.filter(organization=new_org, is_preinstalled=True).count(), 1) self.assertEqual( - Plugin.objects.filter(organization=new_org, is_preinstalled=True).get().name, "helloworldplugin" + Plugin.objects.filter(organization=new_org, is_preinstalled=True).get().name, + "helloworldplugin", ) self.assertEqual(mock_get.call_count, 2) mock_get.assert_any_call( - f"https://github.com/PostHog/helloworldplugin/archive/{HELLO_WORLD_PLUGIN_GITHUB_ZIP[0]}.zip", headers={} + f"https://github.com/PostHog/helloworldplugin/archive/{HELLO_WORLD_PLUGIN_GITHUB_ZIP[0]}.zip", + headers={}, ) @mock.patch("requests.get", side_effect=mocked_plugin_requests_get) @@ -46,7 +49,8 @@ def test_plugins_are_not_preinstalled_on_cloud(self, mock_get): with self.is_cloud(True): with self.settings(PLUGINS_PREINSTALLED_URLS=["https://github.com/PostHog/helloworldplugin/"]): new_org, _, _ = Organization.objects.bootstrap( - self.user, plugins_access_level=Organization.PluginsAccessLevel.INSTALL + self.user, + plugins_access_level=Organization.PluginsAccessLevel.INSTALL, ) self.assertEqual(Plugin.objects.filter(organization=new_org, is_preinstalled=True).count(), 0) diff --git a/posthog/models/test/test_person_override_model.py b/posthog/models/test/test_person_override_model.py index e3365adaf524a..13f3b0a8511ab 100644 --- a/posthog/models/test/test_person_override_model.py +++ b/posthog/models/test/test_person_override_model.py @@ -416,7 +416,13 @@ def create_connection(alias=DEFAULT_DB_ALIAS): def _merge_people( - team, cursor, old_person_uuid, override_person_uuid, oldest_event, can_lock_event=None, done_event=None + team, + cursor, + old_person_uuid, + override_person_uuid, + 
oldest_event, + can_lock_event=None, + done_event=None, ): """ Merge two people together, using the override_person_id as the canonical @@ -592,7 +598,13 @@ def test_person_override_allow_consecutive_merges(people, team, oldest_event): with create_connection() as second_cursor: second_cursor.execute("BEGIN") - _merge_people(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event) + _merge_people( + team, + second_cursor, + override_person.uuid, + new_override_person.uuid, + oldest_event, + ) second_cursor.execute("COMMIT") assert [_[0] for _ in PersonOverrideMapping.objects.all().values_list("uuid")] == [ @@ -648,12 +660,24 @@ def test_person_override_disallows_concurrent_merge(people, team, oldest_event): done_t2_event = Event() t1 = Thread( target=_merge_people, - args=(team, first_cursor, old_person.uuid, override_person.uuid, oldest_event), + args=( + team, + first_cursor, + old_person.uuid, + override_person.uuid, + oldest_event, + ), kwargs={"can_lock_event": can_lock_event, "done_event": done_t1_event}, ) t2 = Thread( target=_merge_people, - args=(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event), + args=( + team, + second_cursor, + override_person.uuid, + new_override_person.uuid, + oldest_event, + ), kwargs={"done_event": done_t2_event}, ) t1.start() @@ -708,12 +732,24 @@ def test_person_override_disallows_concurrent_merge_different_order(people, team done_t2_event = Event() t1 = Thread( target=_merge_people, - args=(team, first_cursor, old_person.uuid, override_person.uuid, oldest_event), + args=( + team, + first_cursor, + old_person.uuid, + override_person.uuid, + oldest_event, + ), kwargs={"done_event": done_t1_event}, ) t2 = Thread( target=_merge_people, - args=(team, second_cursor, override_person.uuid, new_override_person.uuid, oldest_event), + args=( + team, + second_cursor, + override_person.uuid, + new_override_person.uuid, + oldest_event, + ), kwargs={"can_lock_event": can_lock_event, 
"done_event": done_t2_event}, ) t1.start() diff --git a/posthog/models/test/test_subscription_model.py b/posthog/models/test/test_subscription_model.py index bc9bf583e6f15..8552d8bca795a 100644 --- a/posthog/models/test/test_subscription_model.py +++ b/posthog/models/test/test_subscription_model.py @@ -80,7 +80,12 @@ def test_generating_token(self): token = get_unsubscribe_token(subscription, "test2@posthog.com") assert token.startswith("ey") - info = jwt.decode(token, "not-so-secret", audience=PosthogJwtAudience.UNSUBSCRIBE.value, algorithms=["HS256"]) + info = jwt.decode( + token, + "not-so-secret", + audience=PosthogJwtAudience.UNSUBSCRIBE.value, + algorithms=["HS256"], + ) assert info["id"] == subscription.id assert info["email"] == "test2@posthog.com" @@ -137,7 +142,10 @@ def test_unsubscribe_deletes_subscription_if_last_subscriber(self): def test_complex_rrule_configuration(self): # Equivalent to last monday and wednesday of every other month subscription = self._create_insight_subscription( - interval=2, frequency="monthly", bysetpos=-1, byweekday=["wednesday", "friday"] + interval=2, + frequency="monthly", + bysetpos=-1, + byweekday=["wednesday", "friday"], ) # Last wed or fri of 01.22 is Wed 28th @@ -156,7 +164,15 @@ def test_should_work_for_nth_days(self): interval=1, frequency="monthly", bysetpos=3, - byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"], + byweekday=[ + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday", + ], ) subscription.save() assert subscription.next_delivery_date == datetime(2022, 1, 3, 0, 0).replace(tzinfo=ZoneInfo("UTC")) @@ -185,7 +201,15 @@ def test_subscription_summary(self): subscription = self._create_insight_subscription( interval=1, frequency="monthly", - byweekday=["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"], + byweekday=[ + "monday", + "tuesday", + "wednesday", + "thursday", + "friday", + "saturday", + "sunday", + 
], bysetpos=3, ) assert subscription.summary == "sent every month on the third day" diff --git a/posthog/models/test/test_user_model.py b/posthog/models/test/test_user_model.py index 7b805157266f7..fe26931522eac 100644 --- a/posthog/models/test/test_user_model.py +++ b/posthog/models/test/test_user_model.py @@ -12,7 +12,10 @@ def test_create_user_with_distinct_id(self): def test_analytics_metadata(self): # One org, one team, anonymized organization, team, user = User.objects.bootstrap( - organization_name="Test Org", email="test_org@posthog.com", password="12345678", anonymize_data=True + organization_name="Test Org", + email="test_org@posthog.com", + password="12345678", + anonymize_data=True, ) with self.is_cloud(True): diff --git a/posthog/models/uploaded_media.py b/posthog/models/uploaded_media.py index 0a25f452495f1..0161b71beb4f6 100644 --- a/posthog/models/uploaded_media.py +++ b/posthog/models/uploaded_media.py @@ -35,11 +35,19 @@ def get_absolute_url(self) -> str: @classmethod def save_content( - cls, team: Team, created_by: User, file_name: str, content_type: str, content: bytes + cls, + team: Team, + created_by: User, + file_name: str, + content_type: str, + content: bytes, ) -> Optional["UploadedMedia"]: try: media = UploadedMedia.objects.create( - team=team, created_by=created_by, file_name=file_name, content_type=content_type + team=team, + created_by=created_by, + file_name=file_name, + content_type=content_type, ) if settings.OBJECT_STORAGE_ENABLED: save_content_to_object_storage(media, content) @@ -54,7 +62,11 @@ def save_content( except ObjectStorageError as ose: capture_exception(ose) logger.error( - "uploaded_media.object-storage-error", file_name=file_name, team=team.pk, exception=ose, exc_info=True + "uploaded_media.object-storage-error", + file_name=file_name, + team=team.pk, + exception=ose, + exc_info=True, ) return None diff --git a/posthog/models/user.py b/posthog/models/user.py index b385f4b0fc8ab..423936747e2cc 100644 --- 
a/posthog/models/user.py +++ b/posthog/models/user.py @@ -78,7 +78,11 @@ def bootstrap( organization_fields.setdefault("name", organization_name) organization = Organization.objects.create(**organization_fields) user = self.create_user( - email=email, password=password, first_name=first_name, is_staff=is_staff, **user_fields + email=email, + password=password, + first_name=first_name, + is_staff=is_staff, + **user_fields, ) if create_team: team = create_team(organization, user) @@ -129,7 +133,10 @@ class User(AbstractUser, UUIDClassicModel): TOOLBAR_CHOICES = [(DISABLED, DISABLED), (TOOLBAR, TOOLBAR)] current_organization = models.ForeignKey( - "posthog.Organization", models.SET_NULL, null=True, related_name="users_currently+" + "posthog.Organization", + models.SET_NULL, + null=True, + related_name="users_currently+", ) current_team = models.ForeignKey("posthog.Team", models.SET_NULL, null=True, related_name="teams_currently+") email = models.EmailField(_("email address"), unique=True) @@ -168,7 +175,8 @@ def teams(self): """ teams = Team.objects.filter(organization__members=self) if Organization.objects.filter( - members=self, available_features__contains=[AvailableFeature.PROJECT_BASED_PERMISSIONING] + members=self, + available_features__contains=[AvailableFeature.PROJECT_BASED_PERMISSIONING], ).exists(): try: from ee.models import ExplicitTeamMembership @@ -210,7 +218,10 @@ def team(self) -> Optional[Team]: return self.current_team def join( - self, *, organization: Organization, level: OrganizationMembership.Level = OrganizationMembership.Level.MEMBER + self, + *, + organization: Organization, + level: OrganizationMembership.Level = OrganizationMembership.Level.MEMBER, ) -> OrganizationMembership: with transaction.atomic(): membership = OrganizationMembership.objects.create(user=self, organization=organization, level=level) diff --git a/posthog/models/user_scene_personalisation.py b/posthog/models/user_scene_personalisation.py index cf4d3c6ed724f..8b745f67a6808 
100644 --- a/posthog/models/user_scene_personalisation.py +++ b/posthog/models/user_scene_personalisation.py @@ -8,10 +8,17 @@ class UserScenePersonalisation(UUIDModel): dashboard: models.ForeignKey = models.ForeignKey("Dashboard", on_delete=models.CASCADE, null=True, blank=True) team: models.ForeignKey = models.ForeignKey("Team", on_delete=models.CASCADE, null=True, blank=True) user: models.ForeignKey = models.ForeignKey( - "User", on_delete=models.CASCADE, null=True, blank=True, related_name="scene_personalisation" + "User", + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="scene_personalisation", ) class Meta: constraints = [ - models.UniqueConstraint(fields=["team", "user", "scene"], name="posthog_unique_scene_personalisation") + models.UniqueConstraint( + fields=["team", "user", "scene"], + name="posthog_unique_scene_personalisation", + ) ] diff --git a/posthog/models/utils.py b/posthog/models/utils.py index 0452084be4c4d..b00a87eb881c5 100644 --- a/posthog/models/utils.py +++ b/posthog/models/utils.py @@ -78,7 +78,7 @@ def get_series(cls, unix_time_ms: int) -> int: @classmethod def is_valid_uuid(cls, candidate: Any) -> bool: - if type(candidate) != str: + if not isinstance(candidate, str): return False hex = candidate.replace("urn:", "").replace("uuid:", "") hex = hex.strip("{}").replace("-", "") @@ -205,7 +205,9 @@ def create_with_slug(create_func: Callable[..., T], default_slug: str = "", *arg def get_deferred_field_set_for_model( - model: Type[models.Model], fields_not_deferred: Set[str] = set(), field_prefix: str = "" + model: Type[models.Model], + fields_not_deferred: Set[str] = set(), + field_prefix: str = "", ) -> Set[str]: """Return a set of field names to be deferred for a given model. 
Used with `.defer()` after `select_related` diff --git a/posthog/permissions.py b/posthog/permissions.py index 2a6339601cd9a..229a69a311b50 100644 --- a/posthog/permissions.py +++ b/posthog/permissions.py @@ -119,7 +119,6 @@ class OrganizationAdminWritePermissions(BasePermission): message = "Your organization access level is insufficient." def has_permission(self, request: Request, view) -> bool: - if request.method in SAFE_METHODS: return True @@ -136,7 +135,6 @@ def has_permission(self, request: Request, view) -> bool: ) def has_object_permission(self, request: Request, view, object: Model) -> bool: - if request.method in SAFE_METHODS: return True @@ -156,7 +154,7 @@ class TeamMemberAccessPermission(BasePermission): def has_permission(self, request, view) -> bool: try: - view.team + view.team # noqa: B018 except Team.DoesNotExist: return True # This will be handled as a 404 in the viewset requesting_level = view.user_permissions.current_team.effective_membership_level diff --git a/posthog/plugins/site.py b/posthog/plugins/site.py index 18b1dcbc947a4..9cb2b3023f80e 100644 --- a/posthog/plugins/site.py +++ b/posthog/plugins/site.py @@ -32,7 +32,13 @@ def get_transpiled_site_source(id: int, token: str) -> Optional[WebJsSource]: plugin__pluginsourcefile__filename="site.ts", plugin__pluginsourcefile__status=PluginSourceFile.Status.TRANSPILED, ) - .values_list("id", "plugin__pluginsourcefile__transpiled", "web_token", "plugin__config_schema", "config") + .values_list( + "id", + "plugin__pluginsourcefile__transpiled", + "web_token", + "plugin__config_schema", + "config", + ) .first() ) @@ -53,7 +59,13 @@ def get_decide_site_apps(team: "Team", using_database: str = "default") -> List[ plugin__pluginsourcefile__filename="site.ts", plugin__pluginsourcefile__status=PluginSourceFile.Status.TRANSPILED, ) - .values_list("id", "web_token", "plugin__pluginsourcefile__updated_at", "plugin__updated_at", "updated_at") + .values_list( + "id", + "web_token", + 
"plugin__pluginsourcefile__updated_at", + "plugin__updated_at", + "updated_at", + ) .all() ) diff --git a/posthog/plugins/test/mock.py b/posthog/plugins/test/mock.py index 91c2a473e7b42..04c61b17cf063 100644 --- a/posthog/plugins/test/mock.py +++ b/posthog/plugins/test/mock.py @@ -45,19 +45,34 @@ def ok(self): if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=": return MockJSONResponse( - [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}], + [ + { + "sha": "MOCKLATESTCOMMIT", + "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT", + } + ], 200, ) if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=": return MockJSONResponse( - [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}], + [ + { + "sha": "MOCKLATESTCOMMIT", + "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT", + } + ], 200, ) if args[0] == "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo": return MockJSONResponse( - [{"sha": "MOCKLATESTCOMMIT", "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT"}], + [ + { + "sha": "MOCKLATESTCOMMIT", + "html_url": "https://www.github.com/PostHog/posthog/commit/MOCKLATESTCOMMIT", + } + ], 200, ) diff --git a/posthog/plugins/test/test_utils.py b/posthog/plugins/test/test_utils.py index d597db91017c5..d2f971073d481 100644 --- a/posthog/plugins/test/test_utils.py +++ b/posthog/plugins/test/test_utils.py @@ -41,7 +41,10 @@ def test_parse_github_urls(self, mock_get): self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT") self.assertEqual(parsed_url.get("path", None), None) self.assertEqual(mock_get.call_count, 1) - mock_get.assert_called_with("https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", headers={}) + mock_get.assert_called_with( + 
"https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", + headers={}, + ) mock_get.reset_mock() parsed_url = parse_url("https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e") @@ -54,7 +57,8 @@ def test_parse_github_urls(self, mock_get): mock_get.reset_mock() parsed_url = parse_url( - "https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e", get_latest_if_none=True + "https://github.com/PostHog/posthog/tree/82c9218ee40f561b7f37a22d6b6a0ca82887ee3e", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "github") self.assertEqual(parsed_url["user"], "PostHog") @@ -83,11 +87,15 @@ def test_parse_github_urls(self, mock_get): self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT") self.assertEqual(parsed_url.get("path", None), None) self.assertEqual(mock_get.call_count, 1) - mock_get.assert_called_with("https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=", headers={}) + mock_get.assert_called_with( + "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=", + headers={}, + ) mock_get.reset_mock() parsed_url = parse_url( - "https://github.com/PostHog/posthog/tree/main/test/path/in/repo", get_latest_if_none=True + "https://github.com/PostHog/posthog/tree/main/test/path/in/repo", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "github") self.assertEqual(parsed_url["user"], "PostHog") @@ -96,7 +104,8 @@ def test_parse_github_urls(self, mock_get): self.assertEqual(parsed_url["path"], "test/path/in/repo") self.assertEqual(mock_get.call_count, 1) mock_get.assert_called_with( - "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo", headers={} + "https://api.github.com/repos/PostHog/posthog/commits?sha=main&path=test/path/in/repo", + headers={}, ) mock_get.reset_mock() @@ -165,14 +174,18 @@ def test_parse_github_urls(self, mock_get): self.assertEqual(mock_get.call_count, 0) mock_get.reset_mock() - parsed_url = 
parse_url("https://github.com/PostHog/posthog?private_token=TOKEN", get_latest_if_none=True) + parsed_url = parse_url( + "https://github.com/PostHog/posthog?private_token=TOKEN", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "github") self.assertEqual(parsed_url["user"], "PostHog") self.assertEqual(parsed_url["repo"], "posthog") self.assertEqual(parsed_url["tag"], "MOCKLATESTCOMMIT") self.assertEqual(parsed_url.get("path", None), None) mock_get.assert_called_with( - "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", headers={"Authorization": "Bearer TOKEN"} + "https://api.github.com/repos/PostHog/posthog/commits?sha=&path=", + headers={"Authorization": "Bearer TOKEN"}, ) self.assertEqual(mock_get.call_count, 1) mock_get.reset_mock() @@ -205,7 +218,10 @@ def test_parse_github_urls(self, mock_get): self.assertEqual(mock_get.call_count, 1) mock_get.reset_mock() - parsed_url = parse_url("https://github.com/PostHog/posthog?private_token=TOKEN", get_latest_if_none=True) + parsed_url = parse_url( + "https://github.com/PostHog/posthog?private_token=TOKEN", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "github") self.assertEqual(parsed_url["user"], "PostHog") self.assertEqual(parsed_url["repo"], "posthog") @@ -233,34 +249,47 @@ def test_parse_gitlab_urls(self, mock_get): self.assertEqual(parsed_url.get("private_token", None), None) self.assertEqual(mock_get.call_count, 1) mock_get.assert_called_with( - "https://gitlab.com/api/v4/projects/mariusandra%2Fhelloworldplugin/repository/commits", headers={} + "https://gitlab.com/api/v4/projects/mariusandra%2Fhelloworldplugin/repository/commits", + headers={}, ) parsed_url = parse_url( "https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/tree/master" ) - self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline") + self.assertEqual( + parsed_url["project"], + 
"gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline", + ) self.assertEqual(parsed_url["tag"], "master") self.assertEqual(mock_get.call_count, 1) parsed_url = parse_url( "https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/tree/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1" ) - self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline") + self.assertEqual( + parsed_url["project"], + "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline", + ) self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1") self.assertEqual(mock_get.call_count, 1) parsed_url = parse_url( "https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/commit/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1" ) - self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline") + self.assertEqual( + parsed_url["project"], + "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline", + ) self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1") self.assertEqual(mock_get.call_count, 1) parsed_url = parse_url( "https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/archive/master/openshift-custom-pipeline-master.zip" ) - self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline") + self.assertEqual( + parsed_url["project"], + "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline", + ) self.assertEqual(parsed_url["tag"], "master") self.assertEqual(mock_get.call_count, 1) @@ -273,7 +302,8 @@ def test_parse_gitlab_urls(self, mock_get): self.assertEqual(mock_get.call_count, 1) parsed_url = parse_url( - "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", get_latest_if_none=True + "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", + 
get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "gitlab") self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin") @@ -288,7 +318,10 @@ def test_parse_gitlab_urls(self, mock_get): parsed_url = parse_url( "https://gitlab.com/gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline/-/commit/2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1?private_token=PRIVATE" ) - self.assertEqual(parsed_url["project"], "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline") + self.assertEqual( + parsed_url["project"], + "gitlab-org/gl-openshift/openshift-demos/openshift-custom-pipeline", + ) self.assertEqual(parsed_url["tag"], "2b6494bdf8ad35073aafe36ca8a1bdfaf3dc72d1") self.assertEqual(parsed_url["private_token"], "PRIVATE") self.assertEqual(mock_get.call_count, 2) @@ -296,7 +329,8 @@ def test_parse_gitlab_urls(self, mock_get): # default global token with self.settings(GITLAB_TOKEN="MY_GITLAB_TOKEN"): parsed_url = parse_url( - "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", get_latest_if_none=True + "https://gitlab.com/mariusandra/helloworldplugin?private_token=PRIVATE", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "gitlab") self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin") @@ -308,7 +342,10 @@ def test_parse_gitlab_urls(self, mock_get): headers={"Authorization": "Bearer PRIVATE"}, ) - parsed_url = parse_url("https://gitlab.com/mariusandra/helloworldplugin", get_latest_if_none=True) + parsed_url = parse_url( + "https://gitlab.com/mariusandra/helloworldplugin", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "gitlab") self.assertEqual(parsed_url["project"], "mariusandra/helloworldplugin") self.assertEqual(parsed_url["tag"], "ff78cbe1d70316055c610a962a8355a4616d874b") @@ -332,14 +369,20 @@ def test_parse_npm_urls(self, mock_get): self.assertEqual(parsed_url.get("tag", None), None) self.assertEqual(mock_get.call_count, 0) - parsed_url = 
parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin", get_latest_if_none=True) + parsed_url = parse_url( + "https://www.npmjs.com/package/posthog-helloworld-plugin", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin") self.assertEqual(parsed_url["tag"], "MOCK") self.assertEqual(mock_get.call_count, 1) mock_get.assert_called_with("https://registry.npmjs.org/posthog-helloworld-plugin/latest", headers={}) - parsed_url = parse_url("https://www.npmjs.com/package/@posthog/helloworldplugin", get_latest_if_none=True) + parsed_url = parse_url( + "https://www.npmjs.com/package/@posthog/helloworldplugin", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "@posthog/helloworldplugin") self.assertEqual(parsed_url["tag"], "MOCK") @@ -359,7 +402,8 @@ def test_parse_npm_urls(self, mock_get): self.assertEqual(mock_get.call_count, 2) parsed_url = parse_url( - "https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0", get_latest_if_none=True + "https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin") @@ -368,7 +412,8 @@ def test_parse_npm_urls(self, mock_get): # private tokens parsed_url = parse_url( - "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", get_latest_if_none=True + "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin") @@ -376,7 +421,8 @@ def test_parse_npm_urls(self, mock_get): self.assertEqual(parsed_url["private_token"], "TOKEN") self.assertEqual(mock_get.call_count, 3) mock_get.assert_called_with( - "https://registry.npmjs.org/posthog-helloworld-plugin/latest", 
headers={"Authorization": "Bearer TOKEN"} + "https://registry.npmjs.org/posthog-helloworld-plugin/latest", + headers={"Authorization": "Bearer TOKEN"}, ) parsed_url = parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin/v/0.0.0?private_token=TOKEN") @@ -396,7 +442,8 @@ def test_parse_npm_urls(self, mock_get): # default global token with self.settings(NPM_TOKEN="MY_NPM_TOKEN"): parsed_url = parse_url( - "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", get_latest_if_none=True + "https://www.npmjs.com/package/posthog-helloworld-plugin?private_token=TOKEN", + get_latest_if_none=True, ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin") @@ -404,10 +451,14 @@ def test_parse_npm_urls(self, mock_get): self.assertEqual(parsed_url["private_token"], "TOKEN") self.assertEqual(mock_get.call_count, 4) mock_get.assert_called_with( - "https://registry.npmjs.org/posthog-helloworld-plugin/latest", headers={"Authorization": "Bearer TOKEN"} + "https://registry.npmjs.org/posthog-helloworld-plugin/latest", + headers={"Authorization": "Bearer TOKEN"}, ) - parsed_url = parse_url("https://www.npmjs.com/package/posthog-helloworld-plugin", get_latest_if_none=True) + parsed_url = parse_url( + "https://www.npmjs.com/package/posthog-helloworld-plugin", + get_latest_if_none=True, + ) self.assertEqual(parsed_url["type"], "npm") self.assertEqual(parsed_url["pkg"], "posthog-helloworld-plugin") self.assertEqual(parsed_url["tag"], "MOCK") @@ -488,8 +539,14 @@ def test_download_plugin_archive_github(self, mock_get): "https://github.com/PostHog/helloworldplugin/archive/f5a9ea85adaafe7c99014b7e8e0982c447631d54.zip", headers={}, ) - self.assertEqual(zip_file.getinfo("helloworldplugin-imageless-version/index.js").CRC, 1913611967) - self.assertEqual(zip_file.getinfo("helloworldplugin-imageless-version/plugin.json").CRC, 2713501883) + self.assertEqual( + 
zip_file.getinfo("helloworldplugin-imageless-version/index.js").CRC, + 1913611967, + ) + self.assertEqual( + zip_file.getinfo("helloworldplugin-imageless-version/plugin.json").CRC, + 2713501883, + ) def test_download_plugin_archive_gitlab(self, mock_get): plugin_gitlab = download_plugin_archive( @@ -542,7 +599,8 @@ def test_download_plugin_archive_npm(self, mock_get): self.assertEqual(plugin_npm_tgz, base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1])) self.assertEqual(mock_get.call_count, 1) mock_get.assert_called_with( - "https://registry.npmjs.org/posthog-helloworld-plugin/-/posthog-helloworld-plugin-0.0.0.tgz", headers={} + "https://registry.npmjs.org/posthog-helloworld-plugin/-/posthog-helloworld-plugin-0.0.0.tgz", + headers={}, ) plugin_npm_tgz = download_plugin_archive( @@ -576,21 +634,24 @@ def test_download_plugin_archive_npm(self, mock_get): def test_get_file_from_archive(self, mock_get): plugin_json_zip = cast( - dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITHUB_ZIP[1]), "plugin.json") + dict, + get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITHUB_ZIP[1]), "plugin.json"), ) self.assertEqual(plugin_json_zip["name"], "helloworldplugin") self.assertEqual(plugin_json_zip["url"], "https://github.com/PostHog/helloworldplugin") self.assertEqual(plugin_json_zip["description"], "Greet the World and Foo a Bar, JS edition!") plugin_json_zip = cast( - dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITLAB_ZIP[1]), "plugin.json") + dict, + get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_GITLAB_ZIP[1]), "plugin.json"), ) self.assertEqual(plugin_json_zip["name"], "hellojsplugin") self.assertEqual(plugin_json_zip["url"], "https://github.com/PosthHog/helloworldplugin") self.assertEqual(plugin_json_zip["description"], "Greet the World and Foo a Bar, JS edition!") plugin_json_tgz = cast( - dict, get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1]), "plugin.json") + dict, + 
get_file_from_archive(base64.b64decode(HELLO_WORLD_PLUGIN_NPM_TGZ[1]), "plugin.json"), ) self.assertEqual(plugin_json_tgz["name"], "helloworldplugin") self.assertEqual(plugin_json_tgz["url"], "https://github.com/PostHog/helloworldplugin") diff --git a/posthog/plugins/utils.py b/posthog/plugins/utils.py index 9ce8d867acbc4..45eeb5ca94843 100644 --- a/posthog/plugins/utils.py +++ b/posthog/plugins/utils.py @@ -47,7 +47,10 @@ def parse_github_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O parsed["tag"] = "refs/tags/{}".format(parsed["tag"]) elif not re.match(r"^[a-f0-9]{40}$", parsed["tag"] or ""): commits_url = "https://api.github.com/repos/{}/{}/commits?sha={}&path={}".format( - parsed["user"], parsed["repo"], parsed["tag"] or "", parsed["path"] or "" + parsed["user"], + parsed["repo"], + parsed["tag"] or "", + parsed["path"] or "", ) commits = requests.get(commits_url, headers=headers).json() @@ -95,7 +98,8 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O parsed["tag"] = path.split("/")[1] parsed["root_url"] = "https://gitlab.com/{}{}".format( - parsed["project"], "?private_token={}".format(private_token) if private_token else "" + parsed["project"], + "?private_token={}".format(private_token) if private_token else "", ) if get_latest_if_none and not parsed["tag"]: @@ -115,7 +119,9 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O if parsed["tag"]: parsed["tagged_url"] = "https://gitlab.com/{}/-/tree/{}{}".format( - parsed["project"], parsed["tag"], "?private_token={}".format(private_token) if private_token else "" + parsed["project"], + parsed["tag"], + "?private_token={}".format(private_token) if private_token else "", ) return parsed @@ -124,7 +130,8 @@ def parse_gitlab_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, O def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Optional[str]]]: url, private_token = split_url_and_private_token(url) 
match = re.search( - r"^https?://(?:www\.)?npmjs\.com/package/([@a-z0-9_-]+(/[a-z0-9_-]+)?)?/?(v/([A-Za-z0-9_.-]+)/?|)$", url + r"^https?://(?:www\.)?npmjs\.com/package/([@a-z0-9_-]+(/[a-z0-9_-]+)?)?/?(v/([A-Za-z0-9_.-]+)/?|)$", + url, ) if not match: return None @@ -136,19 +143,25 @@ def parse_npm_url(url: str, get_latest_if_none=False) -> Optional[Dict[str, Opti } parsed["root_url"] = "https://www.npmjs.com/package/{}{}".format( - parsed["pkg"], "?private_token={}".format(private_token) if private_token else "" + parsed["pkg"], + "?private_token={}".format(private_token) if private_token else "", ) if get_latest_if_none and not parsed["tag"]: try: token = private_token or settings.NPM_TOKEN headers = {"Authorization": "Bearer {}".format(token)} if token else {} - details = requests.get("https://registry.npmjs.org/{}/latest".format(parsed["pkg"]), headers=headers).json() + details = requests.get( + "https://registry.npmjs.org/{}/latest".format(parsed["pkg"]), + headers=headers, + ).json() parsed["tag"] = details["version"] except Exception: raise Exception("Could not get latest version for: {}".format(url)) if parsed["tag"]: parsed["tagged_url"] = "https://www.npmjs.com/package/{}/v/{}{}".format( - parsed["pkg"], parsed["tag"], "?private_token={}".format(private_token) if private_token else "" + parsed["pkg"], + parsed["tag"], + "?private_token={}".format(private_token) if private_token else "", ) return parsed @@ -184,7 +197,9 @@ def download_plugin_archive(url: str, tag: Optional[str] = None) -> bytes: if not (tag or parsed_url.get("tag", None)): raise Exception("No GitHub tag given!") url = "https://github.com/{user}/{repo}/archive/{tag}.zip".format( - user=parsed_url["user"], repo=parsed_url["repo"], tag=tag or parsed_url["tag"] + user=parsed_url["user"], + repo=parsed_url["repo"], + tag=tag or parsed_url["tag"], ) token = parsed_url["private_token"] or settings.GITHUB_TOKEN if token: @@ -259,9 +274,9 @@ def get_file_from_zip_archive(archive: bytes, filename: 
str, *, json_parse: bool file_bytes = reader.read() if json_parse: return json.loads(file_bytes) - if type(file_bytes) == bytes: + if isinstance(file_bytes, bytes): return file_bytes.decode("utf-8") - return str(file_bytes) + return str(file_bytes) # type: ignore def get_file_from_tgz_archive(archive: bytes, filename, *, json_parse: bool) -> Any: diff --git a/posthog/queries/actor_base_query.py b/posthog/queries/actor_base_query.py index 706829dfba191..396c216f9c01d 100644 --- a/posthog/queries/actor_base_query.py +++ b/posthog/queries/actor_base_query.py @@ -96,7 +96,7 @@ def is_aggregating_by_groups(self) -> bool: def get_actors( self, - ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int]: + ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]: """Get actors in data model and dict formats. Builds query and executes""" self._filter.team = self._team query, params = self.actor_query() @@ -109,13 +109,20 @@ def get_actors( ) actors, serialized_actors = self.get_actors_from_result(raw_result) - if hasattr(self._filter, "include_recordings") and self._filter.include_recordings and self._filter.insight in [INSIGHT_PATHS, INSIGHT_TRENDS, INSIGHT_FUNNELS]: # type: ignore + if ( + hasattr(self._filter, "include_recordings") + and self._filter.include_recordings # type: ignore + and self._filter.insight in [INSIGHT_PATHS, INSIGHT_TRENDS, INSIGHT_FUNNELS] + ): serialized_actors = self.add_matched_recordings_to_serialized_actors(serialized_actors, raw_result) return actors, serialized_actors, len(raw_result) def query_for_session_ids_with_recordings( - self, session_ids: Set[str], date_from: datetime | None, date_to: datetime | None + self, + session_ids: Set[str], + date_from: datetime | None, + date_to: datetime | None, ) -> Set[str]: """Filters a list of session_ids to those that actually have recordings""" query = """ @@ -154,7 +161,9 @@ def 
query_for_session_ids_with_recordings( return {row[0] for row in raw_result} def add_matched_recordings_to_serialized_actors( - self, serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]], raw_result + self, + serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]], + raw_result, ) -> Union[List[SerializedGroup], List[SerializedPerson]]: all_session_ids = set() @@ -172,7 +181,9 @@ def add_matched_recordings_to_serialized_actors( # Prune out deleted recordings session_ids_with_deleted_recordings = set( SessionRecording.objects.filter( - team=self._team, session_id__in=session_ids_with_all_recordings, deleted=True + team=self._team, + session_id__in=session_ids_with_all_recordings, + deleted=True, ).values_list("session_id", flat=True) ) session_ids_with_recordings = session_ids_with_all_recordings.difference(session_ids_with_deleted_recordings) @@ -206,7 +217,7 @@ def add_matched_recordings_to_serialized_actors( def get_actors_from_result( self, raw_result - ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]]]: + ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]],]: actors: Union[QuerySet[Person], QuerySet[Group]] serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]] @@ -215,7 +226,10 @@ def get_actors_from_result( if self.is_aggregating_by_groups: actors, serialized_actors = get_groups( - self._team.pk, cast(int, self.aggregation_group_type_index), actor_ids, value_per_actor_id + self._team.pk, + cast(int, self.aggregation_group_type_index), + actor_ids, + value_per_actor_id, ) else: actors, serialized_actors = get_people(self._team, actor_ids, value_per_actor_id) @@ -223,13 +237,19 @@ def get_actors_from_result( if self.ACTOR_VALUES_INCLUDED: # We fetched actors from Postgres in get_groups/get_people, so `ORDER BY actor_value DESC` no longer holds # We need .sort() to restore this order - 
serialized_actors.sort(key=lambda actor: cast(float, actor["value_at_data_point"]), reverse=True) + serialized_actors.sort( + key=lambda actor: cast(float, actor["value_at_data_point"]), + reverse=True, + ) return actors, serialized_actors def get_groups( - team_id: int, group_type_index: int, group_ids: List[Any], value_per_actor_id: Optional[Dict[str, float]] = None + team_id: int, + group_type_index: int, + group_ids: List[Any], + value_per_actor_id: Optional[Dict[str, float]] = None, ) -> Tuple[QuerySet[Group], List[SerializedGroup]]: """Get groups from raw SQL results in data model and dict formats""" groups: QuerySet[Group] = Group.objects.filter( @@ -239,7 +259,10 @@ def get_groups( def get_people( - team: Team, people_ids: List[Any], value_per_actor_id: Optional[Dict[str, float]] = None, distinct_id_limit=1000 + team: Team, + people_ids: List[Any], + value_per_actor_id: Optional[Dict[str, float]] = None, + distinct_id_limit=1000, ) -> Tuple[QuerySet[Person], List[SerializedPerson]]: """Get people from raw SQL results in data model and dict formats""" distinct_id_subquery = Subquery( @@ -263,7 +286,9 @@ def get_people( def serialize_people( - team: Team, data: Union[QuerySet[Person], List[Person]], value_per_actor_id: Optional[Dict[str, float]] = None + team: Team, + data: Union[QuerySet[Person], List[Person]], + value_per_actor_id: Optional[Dict[str, float]] = None, ) -> List[SerializedPerson]: from posthog.api.person import get_person_name diff --git a/posthog/queries/app_metrics/app_metrics.py b/posthog/queries/app_metrics/app_metrics.py index e6c36b799ff1e..26f91f626ec0b 100644 --- a/posthog/queries/app_metrics/app_metrics.py +++ b/posthog/queries/app_metrics/app_metrics.py @@ -13,7 +13,10 @@ from posthog.models.event.util import format_clickhouse_timestamp from posthog.models.filters.mixins.base import IntervalType from posthog.models.team.team import Team -from posthog.queries.app_metrics.serializers import AppMetricsErrorsRequestSerializer, 
AppMetricsRequestSerializer +from posthog.queries.app_metrics.serializers import ( + AppMetricsErrorsRequestSerializer, + AppMetricsRequestSerializer, +) from posthog.queries.util import format_ch_timestamp, get_time_in_seconds_for_period from posthog.utils import relative_date_parse @@ -27,7 +30,10 @@ def __init__(self, team: Team): def run(self): results = sync_execute( self.QUERY, - {"team_id": self.team.pk, "from_date": format_clickhouse_timestamp(datetime.now() - timedelta(hours=24))}, + { + "team_id": self.team.pk, + "from_date": format_clickhouse_timestamp(datetime.now() - timedelta(hours=24)), + }, ) return dict(results) @@ -80,7 +86,9 @@ def query(self): @property def date_from(self): return relative_date_parse( - self.filter.validated_data.get("date_from"), self.team.timezone_info, always_truncate=True + self.filter.validated_data.get("date_from"), + self.team.timezone_info, + always_truncate=True, ) @property @@ -121,7 +129,12 @@ def run(self): class AppMetricsErrorDetailsQuery: QUERY = QUERY_APP_METRICS_ERROR_DETAILS - def __init__(self, team: Team, plugin_config_id: int, filter: AppMetricsErrorsRequestSerializer): + def __init__( + self, + team: Team, + plugin_config_id: int, + filter: AppMetricsErrorsRequestSerializer, + ): self.team = team self.plugin_config_id = plugin_config_id self.filter = filter diff --git a/posthog/queries/app_metrics/historical_exports.py b/posthog/queries/app_metrics/historical_exports.py index 484f01546001b..cbf22d480156b 100644 --- a/posthog/queries/app_metrics/historical_exports.py +++ b/posthog/queries/app_metrics/historical_exports.py @@ -7,7 +7,10 @@ from posthog.models.activity_logging.activity_log import ActivityLog from posthog.models.plugin import PluginStorage from posthog.models.team.team import Team -from posthog.queries.app_metrics.app_metrics import AppMetricsErrorsQuery, AppMetricsQuery +from posthog.queries.app_metrics.app_metrics import ( + AppMetricsErrorsQuery, + AppMetricsQuery, +) from 
posthog.queries.app_metrics.serializers import AppMetricsRequestSerializer diff --git a/posthog/queries/app_metrics/test/test_app_metrics.py b/posthog/queries/app_metrics/test/test_app_metrics.py index affe411457116..95eeb13bc09f4 100644 --- a/posthog/queries/app_metrics/test/test_app_metrics.py +++ b/posthog/queries/app_metrics/test/test_app_metrics.py @@ -15,7 +15,10 @@ AppMetricsQuery, TeamPluginsDeliveryRateQuery, ) -from posthog.queries.app_metrics.serializers import AppMetricsErrorsRequestSerializer, AppMetricsRequestSerializer +from posthog.queries.app_metrics.serializers import ( + AppMetricsErrorsRequestSerializer, + AppMetricsRequestSerializer, +) from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries from posthog.utils import cast_timestamp_or_now @@ -100,7 +103,11 @@ def test_query_delivery_rate(self): @freeze_time("2021-12-05T13:23:00Z") def test_ignores_out_of_bound_metrics(self): create_app_metric( - team_id=-1, category="processEvent", plugin_config_id=3, timestamp="2021-12-05T00:10:00Z", successes=5 + team_id=-1, + category="processEvent", + plugin_config_id=3, + timestamp="2021-12-05T00:10:00Z", + successes=5, ) create_app_metric( team_id=self.team.pk, @@ -269,7 +276,11 @@ def test_ignores_unrelated_data(self): # Negative examples # Different team create_app_metric( - team_id=-1, category="processEvent", plugin_config_id=3, timestamp="2021-12-05T13:10:00Z", failures=1 + team_id=-1, + category="processEvent", + plugin_config_id=3, + timestamp="2021-12-05T13:10:00Z", + failures=1, ) # Different pluginConfigId create_app_metric( @@ -544,7 +555,9 @@ def test_error_details_query(self): ) filter = make_filter( - serializer_klass=AppMetricsErrorsRequestSerializer, category="processEvent", error_type="SomeError" + serializer_klass=AppMetricsErrorsRequestSerializer, + category="processEvent", + error_type="SomeError", ) results = AppMetricsErrorDetailsQuery(self.team, 3, filter).run() @@ -679,7 +692,9 @@ def 
test_ignores_unrelated_data(self): ) filter = make_filter( - serializer_klass=AppMetricsErrorsRequestSerializer, category="processEvent", error_type="SomeError" + serializer_klass=AppMetricsErrorsRequestSerializer, + category="processEvent", + error_type="SomeError", ) results = AppMetricsErrorDetailsQuery(self.team, 3, filter).run() diff --git a/posthog/queries/app_metrics/test/test_historical_exports.py b/posthog/queries/app_metrics/test/test_historical_exports.py index 2e9ffcb41a7bb..6bed36981931c 100644 --- a/posthog/queries/app_metrics/test/test_historical_exports.py +++ b/posthog/queries/app_metrics/test/test_historical_exports.py @@ -8,9 +8,17 @@ from posthog.models.plugin import Plugin, PluginConfig, PluginStorage from posthog.models.team.team import Team from posthog.models.utils import UUIDT -from posthog.queries.app_metrics.historical_exports import historical_export_metrics, historical_exports_activity +from posthog.queries.app_metrics.historical_exports import ( + historical_export_metrics, + historical_exports_activity, +) from posthog.queries.app_metrics.test.test_app_metrics import create_app_metric -from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries, snapshot_postgres_queries +from posthog.test.base import ( + BaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, + snapshot_postgres_queries, +) SAMPLE_PAYLOAD = {"dateRange": ["2021-06-10", "2022-06-12"], "parallelism": 1} @@ -32,12 +40,18 @@ def test_historical_exports_activity_for_not_finished_export(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) PluginStorage.objects.create( - plugin_config_id=self.plugin_config.pk, key="EXPORT_COORDINATION", value=json.dumps({"progress": 0.33}) + 
plugin_config_id=self.plugin_config.pk, + key="EXPORT_COORDINATION", + value=json.dumps({"progress": 0.33}), ) activities = historical_exports_activity(self.team.pk, self.plugin_config.pk) @@ -61,7 +75,11 @@ def test_historical_exports_activity_for_finished_export(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) with freeze_time("2021-08-25T13:00:00Z"): @@ -69,7 +87,11 @@ def test_historical_exports_activity_for_finished_export(self): activity="export_success", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload={}), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload={}, + ), ), ) @@ -95,7 +117,11 @@ def test_historical_exports_activity_for_failed_export(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) with freeze_time("2021-08-25T13:00:00Z"): @@ -104,7 +130,9 @@ def test_historical_exports_activity_for_failed_export(self): detail=Detail( name="Some export plugin", trigger=Trigger( - job_type="Export historical events V2", job_id="1234", payload={"failure_reason": "foobar"} + job_type="Export historical events V2", + job_id="1234", + payload={"failure_reason": "foobar"}, ), ), ) @@ -130,7 +158,11 @@ def test_historical_exports_activity_ignores_unrelated_entries(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + 
job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) @@ -192,7 +224,9 @@ def test_historical_exports_orders_activity_by_created_at(self): detail=Detail( name="Some export plugin", trigger=Trigger( - job_type="Export historical events V2", job_id=str(hour), payload=SAMPLE_PAYLOAD + job_type="Export historical events V2", + job_id=str(hour), + payload=SAMPLE_PAYLOAD, ), ), ) @@ -218,7 +252,11 @@ def test_historical_export_metrics(self): activity="job_triggered", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload=SAMPLE_PAYLOAD), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload=SAMPLE_PAYLOAD, + ), ), ) with freeze_time("2021-08-25T05:00:00Z"): @@ -226,7 +264,11 @@ def test_historical_export_metrics(self): activity="export_success", detail=Detail( name="Some export plugin", - trigger=Trigger(job_type="Export historical events V2", job_id="1234", payload={}), + trigger=Trigger( + job_type="Export historical events V2", + job_id="1234", + payload={}, + ), ), ) @@ -276,7 +318,11 @@ def test_historical_export_metrics(self): "successes": [0, 102, 0, 10, 0, 0, 0], "successes_on_retry": [0, 0, 0, 0, 0, 0, 0], "failures": [0, 0, 2, 0, 0, 0, 0], - "totals": {"successes": 112, "successes_on_retry": 0, "failures": 2}, + "totals": { + "successes": 112, + "successes_on_retry": 0, + "failures": 2, + }, }, "summary": { "duration": 4 * 60 * 60, diff --git a/posthog/queries/base.py b/posthog/queries/base.py index 57ff555c2dcc8..f03e6723ac72e 100644 --- a/posthog/queries/base.py +++ b/posthog/queries/base.py @@ -19,7 +19,11 @@ from posthog.models.cohort import Cohort, CohortPeople from posthog.models.filters.filter import Filter from posthog.models.filters.path_filter import PathFilter -from posthog.models.property import CLICKHOUSE_ONLY_PROPERTY_TYPES, Property, PropertyGroup +from posthog.models.property import ( + CLICKHOUSE_ONLY_PROPERTY_TYPES, + Property, + PropertyGroup, 
+) from posthog.models.property.property import OperatorType, ValueT from posthog.models.team import Team from posthog.queries.util import convert_to_datetime_aware @@ -181,9 +185,12 @@ def match_property(property: Property, override_property_values: Dict[str, Any]) def empty_or_null_with_value_q( - column: str, key: str, operator: Optional[OperatorType], value: ValueT, negated: bool = False + column: str, + key: str, + operator: Optional[OperatorType], + value: ValueT, + negated: bool = False, ) -> Q: - if operator == "exact" or operator is None: value_as_given = Property._parse_value(value) value_as_coerced_to_number = Property._parse_value(value, convert_to_number=True) @@ -220,13 +227,11 @@ def property_to_Q( cohorts_cache: Optional[Dict[int, Cohort]] = None, using_database: str = "default", ) -> Q: - if property.type in CLICKHOUSE_ONLY_PROPERTY_TYPES: raise ValueError(f"property_to_Q: type is not supported: {repr(property.type)}") value = property._parse_value(property.value) if property.type == "cohort": - cohort_id = int(cast(Union[str, int], value)) if cohorts_cache is not None: if cohorts_cache.get(cohort_id) is None: @@ -239,14 +244,23 @@ def property_to_Q( return Q( Exists( CohortPeople.objects.using(using_database) - .filter(cohort_id=cohort_id, person_id=OuterRef("id"), cohort__id=cohort_id) + .filter( + cohort_id=cohort_id, + person_id=OuterRef("id"), + cohort__id=cohort_id, + ) .only("id") ) ) else: # :TRICKY: This has potential to create an infinite loop if the cohort is recursive. # But, this shouldn't happen because we check for cyclic cohorts on creation. 
- return property_group_to_Q(cohort.properties, override_property_values, cohorts_cache, using_database) + return property_group_to_Q( + cohort.properties, + override_property_values, + cohorts_cache, + using_database, + ) # short circuit query if key exists in override_property_values if property.key in override_property_values and property.operator != "is_not_set": @@ -277,7 +291,11 @@ def property_to_Q( return Q(pk=-1) if isinstance(property.operator, str) and property.operator.startswith("not_"): return empty_or_null_with_value_q( - column, property.key, cast(OperatorType, property.operator[4:]), value, negated=True + column, + property.key, + cast(OperatorType, property.operator[4:]), + value, + negated=True, ) if property.operator in ("is_date_after", "is_date_before"): @@ -294,7 +312,6 @@ def property_group_to_Q( cohorts_cache: Optional[Dict[int, Cohort]] = None, using_database: str = "default", ) -> Q: - filters = Q() if not property_group or len(property_group.values) == 0: @@ -303,7 +320,10 @@ def property_group_to_Q( if isinstance(property_group.values[0], PropertyGroup): for group in property_group.values: group_filter = property_group_to_Q( - cast(PropertyGroup, group), override_property_values, cohorts_cache, using_database + cast(PropertyGroup, group), + override_property_values, + cohorts_cache, + using_database, ) if property_group.type == PropertyOperatorType.OR: filters |= group_filter diff --git a/posthog/queries/breakdown_props.py b/posthog/queries/breakdown_props.py index 9d0ccf80db32e..a7a320e8e5dfa 100644 --- a/posthog/queries/breakdown_props.py +++ b/posthog/queries/breakdown_props.py @@ -2,7 +2,12 @@ from django.forms import ValidationError -from posthog.constants import BREAKDOWN_TYPES, MONTHLY_ACTIVE, WEEKLY_ACTIVE, PropertyOperatorType +from posthog.constants import ( + BREAKDOWN_TYPES, + MONTHLY_ACTIVE, + WEEKLY_ACTIVE, + PropertyOperatorType, +) from posthog.hogql.hogql import HogQLContext from posthog.models.cohort import Cohort from 
posthog.models.cohort.util import format_filter_query @@ -26,7 +31,10 @@ from posthog.queries.person_query import PersonQuery from posthog.queries.query_date_range import QueryDateRange from posthog.session_recordings.queries.session_query import SessionQuery -from posthog.queries.trends.sql import HISTOGRAM_ELEMENTS_ARRAY_OF_KEY_SQL, TOP_ELEMENTS_ARRAY_OF_KEY_SQL +from posthog.queries.trends.sql import ( + HISTOGRAM_ELEMENTS_ARRAY_OF_KEY_SQL, + TOP_ELEMENTS_ARRAY_OF_KEY_SQL, +) from posthog.queries.util import PersonPropertiesMode from posthog.utils import PersonOnEventsMode @@ -98,7 +106,10 @@ def get_breakdown_prop_values( ) person_query = PersonQuery( - filter, team.pk, column_optimizer=column_optimizer, entity=entity if not use_all_funnel_entities else None + filter, + team.pk, + column_optimizer=column_optimizer, + entity=entity if not use_all_funnel_entities else None, ) if person_properties_mode == PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2: person_join_clauses = PERSON_OVERRIDES_JOIN_SQL.format( @@ -160,7 +171,10 @@ def get_breakdown_prop_values( filter.hogql_context, filter.breakdown_normalize_url, direct_on_events=person_properties_mode - in [PersonPropertiesMode.DIRECT_ON_EVENTS, PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2], + in [ + PersonPropertiesMode.DIRECT_ON_EVENTS, + PersonPropertiesMode.DIRECT_ON_EVENTS_WITH_POE_V2, + ], cast_as_float=filter.using_histogram, ) @@ -269,7 +283,11 @@ def _to_value_expression( value_expression = translate_hogql(cast(str, breakdown), hogql_context) else: value_expression = get_single_or_multi_property_string_expr( - breakdown, table="events", query_alias=None, column="properties", normalize_url=breakdown_normalize_url + breakdown, + table="events", + query_alias=None, + column="properties", + normalize_url=breakdown_normalize_url, ) if cast_as_float: diff --git a/posthog/queries/cohort_query.py b/posthog/queries/cohort_query.py index ec1a1d7f9efff..1c1b697bfc222 100644 --- 
a/posthog/queries/cohort_query.py +++ b/posthog/queries/cohort_query.py @@ -1,6 +1,8 @@ from posthog.settings import EE_AVAILABLE if EE_AVAILABLE: - from ee.clickhouse.queries.enterprise_cohort_query import EnterpriseCohortQuery as CohortQuery + from ee.clickhouse.queries.enterprise_cohort_query import ( + EnterpriseCohortQuery as CohortQuery, + ) else: from posthog.queries.foss_cohort_query import FOSSCohortQuery as CohortQuery # type: ignore diff --git a/posthog/queries/column_optimizer/column_optimizer.py b/posthog/queries/column_optimizer/column_optimizer.py index 2d49afeeaf8dd..f1952e500c169 100644 --- a/posthog/queries/column_optimizer/column_optimizer.py +++ b/posthog/queries/column_optimizer/column_optimizer.py @@ -2,7 +2,9 @@ from posthog.settings import EE_AVAILABLE if EE_AVAILABLE: - from ee.clickhouse.queries.column_optimizer import EnterpriseColumnOptimizer as ColumnOptimizer + from ee.clickhouse.queries.column_optimizer import ( + EnterpriseColumnOptimizer as ColumnOptimizer, + ) else: from posthog.queries.column_optimizer.foss_column_optimizer import ( # type: ignore FOSSColumnOptimizer as ColumnOptimizer, diff --git a/posthog/queries/column_optimizer/foss_column_optimizer.py b/posthog/queries/column_optimizer/foss_column_optimizer.py index 104f3cd000137..19487a7cb5de4 100644 --- a/posthog/queries/column_optimizer/foss_column_optimizer.py +++ b/posthog/queries/column_optimizer/foss_column_optimizer.py @@ -4,7 +4,10 @@ from posthog.clickhouse.materialized_columns import ColumnName, get_materialized_columns from posthog.constants import TREND_FILTER_TYPE_ACTIONS, FunnelCorrelationType -from posthog.models.action.util import get_action_tables_and_properties, uses_elements_chain +from posthog.models.action.util import ( + get_action_tables_and_properties, + uses_elements_chain, +) from posthog.models.entity import Entity from posthog.models.filters import Filter from posthog.models.filters.mixins.utils import cached_property @@ -13,7 +16,11 @@ from 
posthog.models.filters.retention_filter import RetentionFilter from posthog.models.filters.stickiness_filter import StickinessFilter from posthog.models.filters.utils import GroupTypeIndex -from posthog.models.property import PropertyIdentifier, PropertyType, TableWithProperties +from posthog.models.property import ( + PropertyIdentifier, + PropertyType, + TableWithProperties, +) from posthog.models.property.util import box_value, extract_tables_and_properties from posthog.queries.property_optimizer import PropertyOptimizer @@ -27,7 +34,13 @@ class FOSSColumnOptimizer: def __init__( self, - filter: Union[Filter, PathFilter, RetentionFilter, StickinessFilter, PropertiesTimelineFilter], + filter: Union[ + Filter, + PathFilter, + RetentionFilter, + StickinessFilter, + PropertiesTimelineFilter, + ], team_id: int, ): self.filter = filter @@ -53,7 +66,10 @@ def person_columns_to_query(self) -> Set[ColumnName]: return self.columns_to_query("person", set(self.used_properties_with_type("person"))) def columns_to_query( - self, table: TableWithProperties, used_properties: Set[PropertyIdentifier], table_column: str = "properties" + self, + table: TableWithProperties, + used_properties: Set[PropertyIdentifier], + table_column: str = "properties", ) -> Set[ColumnName]: "Transforms a list of property names to what columns are needed for that query" @@ -119,12 +135,24 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: boxed_breakdown = box_value(self.filter.breakdown) for b in boxed_breakdown: if isinstance(b, str): - counter[(b, self.filter.breakdown_type, self.filter.breakdown_group_type_index)] += 1 + counter[ + ( + b, + self.filter.breakdown_type, + self.filter.breakdown_group_type_index, + ) + ] += 1 # If we have a breakdowns attribute then make sure we pull in everything we # need to calculate it for breakdown in self.filter.breakdowns or []: - counter[(breakdown["property"], breakdown["type"], self.filter.breakdown_group_type_index)] += 1 + counter[ + 
( + breakdown["property"], + breakdown["type"], + self.filter.breakdown_group_type_index, + ) + ] += 1 # Both entities and funnel exclusions can contain nested property filters for entity in self.entities_used_in_filter(): @@ -147,7 +175,6 @@ def properties_used_in_filter(self) -> TCounter[PropertyIdentifier]: and self.filter.correlation_type == FunnelCorrelationType.PROPERTIES and self.filter.correlation_property_names ): - for prop_value in self.filter.correlation_property_names: counter[(prop_value, "person", None)] += 1 @@ -157,7 +184,11 @@ def used_properties_with_type(self, property_type: PropertyType) -> TCounter[Pro return Counter( { (name, type, group_type_index): count - for (name, type, group_type_index), count in self.properties_used_in_filter.items() + for ( + name, + type, + group_type_index, + ), count in self.properties_used_in_filter.items() if type == property_type } ) diff --git a/posthog/queries/event_query/event_query.py b/posthog/queries/event_query/event_query.py index 9be4dc1a2fbd2..5018892060873 100644 --- a/posthog/queries/event_query/event_query.py +++ b/posthog/queries/event_query/event_query.py @@ -47,7 +47,12 @@ class EventQuery(metaclass=ABCMeta): def __init__( self, filter: Union[ - Filter, PathFilter, RetentionFilter, StickinessFilter, SessionRecordingsFilter, PropertiesTimelineFilter + Filter, + PathFilter, + RetentionFilter, + StickinessFilter, + SessionRecordingsFilter, + PropertiesTimelineFilter, ], team: Team, round_interval=False, @@ -68,7 +73,10 @@ def __init__( self._extra_event_properties = extra_event_properties self._column_optimizer = ColumnOptimizer(self._filter, self._team_id) self._extra_person_fields = extra_person_fields - self.params: Dict[str, Any] = {"team_id": self._team_id, "timezone": team.timezone} + self.params: Dict[str, Any] = { + "team_id": self._team_id, + "timezone": team.timezone, + } self._should_join_distinct_ids = should_join_distinct_ids self._should_join_persons = should_join_persons @@ -183,7 
+191,12 @@ def _does_cohort_need_persons(self, prop: Property) -> bool: def _person_query(self) -> PersonQuery: if isinstance(self._filter, PropertiesTimelineFilter): raise Exception("Properties Timeline never needs person query") - return PersonQuery(self._filter, self._team_id, self._column_optimizer, extra_fields=self._extra_person_fields) + return PersonQuery( + self._filter, + self._team_id, + self._column_optimizer, + extra_fields=self._extra_person_fields, + ) def _get_person_query(self) -> Tuple[str, Dict]: if self._should_join_persons: @@ -205,7 +218,11 @@ def _get_groups_query(self) -> Tuple[str, Dict]: def _sessions_query(self) -> SessionQuery: if isinstance(self._filter, PropertiesTimelineFilter): raise Exception("Properties Timeline never needs sessions query") - return SessionQuery(filter=self._filter, team=self._team, session_id_alias=self._session_id_alias) + return SessionQuery( + filter=self._filter, + team=self._team, + session_id_alias=self._session_id_alias, + ) def _get_sessions_query(self) -> Tuple[str, Dict]: if self._should_join_sessions: diff --git a/posthog/queries/foss_cohort_query.py b/posthog/queries/foss_cohort_query.py index b9fc8511b6301..e6005abab632a 100644 --- a/posthog/queries/foss_cohort_query.py +++ b/posthog/queries/foss_cohort_query.py @@ -5,9 +5,19 @@ from posthog.models import Filter, Team from posthog.models.action import Action from posthog.models.cohort import Cohort -from posthog.models.cohort.util import format_static_cohort_query, get_count_operator, get_entity_query +from posthog.models.cohort.util import ( + format_static_cohort_query, + get_count_operator, + get_entity_query, +) from posthog.models.filters.mixins.utils import cached_property -from posthog.models.property import BehavioralPropertyType, OperatorInterval, Property, PropertyGroup, PropertyName +from posthog.models.property import ( + BehavioralPropertyType, + OperatorInterval, + Property, + PropertyGroup, + PropertyName, +) from 
posthog.models.property.util import prop_filter_json_extract from posthog.queries.event_query import EventQuery from posthog.queries.util import PersonPropertiesMode @@ -17,7 +27,14 @@ Event = Tuple[str, Union[str, int]] -INTERVAL_TO_SECONDS = {"minute": 60, "hour": 3600, "day": 86400, "week": 604800, "month": 2592000, "year": 31536000} +INTERVAL_TO_SECONDS = { + "minute": 60, + "hour": 3600, + "day": 86400, + "week": 604800, + "month": 2592000, + "year": 31536000, +} def relative_date_to_seconds(date: Tuple[Optional[int], Union[OperatorInterval, None]]): @@ -101,7 +118,6 @@ def if_condition(condition: str, true_res: str, false_res: str) -> str: class FOSSCohortQuery(EventQuery): - BEHAVIOR_QUERY_ALIAS = "behavior_query" FUNNEL_QUERY_ALIAS = "funnel_query" SEQUENCE_FIELD_ALIAS = "steps" @@ -205,7 +221,13 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper new_property_group_list.append( PropertyGroup( type=PropertyOperatorType.AND, - values=[Property(key="fake_key_01r2ho", value=0, type="person")], + values=[ + Property( + key="fake_key_01r2ho", + value=0, + type="person", + ) + ], ) ) else: @@ -228,7 +250,6 @@ def _unwrap(property_group: PropertyGroup, negate_group: bool = False) -> Proper # Implemented in /ee def get_query(self) -> Tuple[str, Dict[str, Any]]: - if not self._outer_property_groups: # everything is pushed down, no behavioral stuff to do # thus, use personQuery directly @@ -240,7 +261,11 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: subq = [] - behavior_subquery, behavior_subquery_params, behavior_query_alias = self._get_behavior_subquery() + ( + behavior_subquery, + behavior_subquery_params, + behavior_query_alias, + ) = self._get_behavior_subquery() subq.append((behavior_subquery, behavior_query_alias)) self.params.update(behavior_subquery_params) @@ -302,7 +327,6 @@ def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]: query, params = "", {} if self._should_join_behavioral_query: - _fields = [ 
f"{self.DISTINCT_ID_TABLE_ALIAS if self._person_on_events_mode == PersonOnEventsMode.DISABLED else self.EVENT_TABLE_ALIAS}.person_id AS person_id" ] @@ -328,7 +352,12 @@ def _get_behavior_subquery(self) -> Tuple[str, Dict[str, Any], str]: query, params = ( query, - {"team_id": self._team_id, event_param_name: self._events, **date_params, **person_prop_params}, + { + "team_id": self._team_id, + event_param_name: self._events, + **date_params, + **person_prop_params, + }, ) return query, params, self.BEHAVIOR_QUERY_ALIAS @@ -389,7 +418,6 @@ def build_conditions(prop: Optional[Union[PropertyGroup, Property]], prepend="le # Implemented in /ee def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: - res: str = "" params: Dict[str, Any] = {} @@ -412,7 +440,12 @@ def _get_condition_for_property(self, prop: Property, prepend: str, idx: int) -> def get_person_condition(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: if self._outer_property_groups and len(self._outer_property_groups.flat): return prop_filter_json_extract( - prop, idx, prepend, prop_var="person_props", allow_denormalized_props=True, property_operator="" + prop, + idx, + prepend, + prop_var="person_props", + allow_denormalized_props=True, + property_operator="", ) else: return "", {} @@ -440,7 +473,10 @@ def get_performed_event_condition(self, prop: Property, prepend: str, idx: int) self._fields.append(field) # Negation is handled in the where clause to ensure the right result if a full join occurs where the joined person did not perform the event - return f"{'NOT' if prop.negation else ''} {column_name}", {f"{date_param}": date_value, **entity_params} + return f"{'NOT' if prop.negation else ''} {column_name}", { + f"{date_param}": date_value, + **entity_params, + } def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) -> Tuple[str, Dict[str, Any]]: event = (prop.event_type, prop.key) @@ -461,7 +497,11 
@@ def get_performed_event_multiple(self, prop: Property, prepend: str, idx: int) - # Negation is handled in the where clause to ensure the right result if a full join occurs where the joined person did not perform the event return ( f"{'NOT' if prop.negation else ''} {column_name}", - {f"{operator_value_param}": count, f"{date_param}": date_value, **entity_params}, + { + f"{operator_value_param}": count, + f"{date_param}": date_value, + **entity_params, + }, ) def _determine_should_join_distinct_ids(self) -> None: @@ -497,7 +537,10 @@ def _validate_negations(self) -> None: pass def _get_entity( - self, event: Tuple[Optional[str], Optional[Union[int, str]]], prepend: str, idx: int + self, + event: Tuple[Optional[str], Optional[Union[int, str]]], + prepend: str, + idx: int, ) -> Tuple[str, Dict[str, Any]]: res: str = "" params: Dict[str, Any] = {} @@ -508,12 +551,20 @@ def _get_entity( if event[0] == "actions": self._add_action(int(event[1])) res, params = get_entity_query( - None, int(event[1]), self._team_id, f"{prepend}_entity_{idx}", self._filter.hogql_context + None, + int(event[1]), + self._team_id, + f"{prepend}_entity_{idx}", + self._filter.hogql_context, ) elif event[0] == "events": self._add_event(str(event[1])) res, params = get_entity_query( - str(event[1]), None, self._team_id, f"{prepend}_entity_{idx}", self._filter.hogql_context + str(event[1]), + None, + self._team_id, + f"{prepend}_entity_{idx}", + self._filter.hogql_context, ) else: raise ValueError(f"Event type must be 'events' or 'actions'") diff --git a/posthog/queries/funnels/base.py b/posthog/queries/funnels/base.py index 32cfadf4abd1e..8ac25880932a7 100644 --- a/posthog/queries/funnels/base.py +++ b/posthog/queries/funnels/base.py @@ -76,7 +76,10 @@ def __init__( if self._filter.funnel_window_days: self._filter = self._filter.shallow_clone( - {FUNNEL_WINDOW_INTERVAL: self._filter.funnel_window_days, FUNNEL_WINDOW_INTERVAL_UNIT: "day"} + { + FUNNEL_WINDOW_INTERVAL: 
self._filter.funnel_window_days, + FUNNEL_WINDOW_INTERVAL_UNIT: "day", + } ) if not self._filter.limit: @@ -308,7 +311,6 @@ def _get_timestamp_selects(self) -> Tuple[str, str]: target_step -= 1 if self._include_preceding_timestamp: - if target_step == 0: raise ValueError("Cannot request preceding step timestamp if target funnel step is the first step") @@ -391,7 +393,6 @@ def _get_exclusion_condition(self): return "" def _get_sorting_condition(self, curr_index: int, max_steps: int): - if curr_index == 1: return "1" @@ -414,7 +415,11 @@ def _get_sorting_condition(self, curr_index: int, max_steps: int): return f"if({' AND '.join(conditions)}, {curr_index}, {self._get_sorting_condition(curr_index - 1, max_steps)})" def _get_inner_event_query( - self, entities=None, entity_name="events", skip_entity_filter=False, skip_step_filter=False + self, + entities=None, + entity_name="events", + skip_entity_filter=False, + skip_step_filter=False, ) -> str: entities_to_use = entities or self._filter.entities @@ -444,7 +449,12 @@ def _get_inner_event_query( all_step_cols.extend(step_cols) for exclusion_id, entity in enumerate(self._filter.exclusions): - step_cols = self._get_step_col(entity, entity.funnel_from_step, entity_name, f"exclusion_{exclusion_id}_") + step_cols = self._get_step_col( + entity, + entity.funnel_from_step, + entity_name, + f"exclusion_{exclusion_id}_", + ) # every exclusion entity has the form: exclusion__step_i & timestamp exclusion__latest_i # where i is the starting step for exclusion on that entity all_step_cols.extend(step_cols) @@ -715,7 +725,6 @@ def _get_breakdown_select_prop(self) -> str: self.params.update({"breakdown": self._filter.breakdown}) if self._filter.breakdown_type == "person": - if self._team.person_on_events_mode != PersonOnEventsMode.DISABLED: basic_prop_selector = get_single_or_multi_property_string_expr( self._filter.breakdown, @@ -727,7 +736,10 @@ def _get_breakdown_select_prop(self) -> str: ) else: basic_prop_selector = 
get_single_or_multi_property_string_expr( - self._filter.breakdown, table="person", query_alias="prop_basic", column="person_props" + self._filter.breakdown, + table="person", + query_alias="prop_basic", + column="person_props", ) elif self._filter.breakdown_type == "event": basic_prop_selector = get_single_or_multi_property_string_expr( @@ -756,7 +768,10 @@ def _get_breakdown_select_prop(self) -> str: else: properties_field = f"group_properties_{self._filter.breakdown_group_type_index}" expression, _ = get_property_string_expr( - table="groups", property_name=self._filter.breakdown, var="%(breakdown)s", column=properties_field + table="groups", + property_name=self._filter.breakdown, + var="%(breakdown)s", + column=properties_field, ) basic_prop_selector = f"{expression} AS prop_basic" elif self._filter.breakdown_type == "hogql": @@ -789,7 +804,6 @@ def _get_breakdown_select_prop(self) -> str: BreakdownAttributionType.FIRST_TOUCH, BreakdownAttributionType.LAST_TOUCH, ]: - prop_conditional = ( "notEmpty(arrayFilter(x -> notEmpty(x), prop))" if self._query_has_array_breakdown() @@ -833,7 +847,10 @@ def _get_breakdown_conditions(self) -> Optional[str]: if self._filter.breakdown: use_all_funnel_entities = ( self._filter.breakdown_attribution_type - in [BreakdownAttributionType.FIRST_TOUCH, BreakdownAttributionType.LAST_TOUCH] + in [ + BreakdownAttributionType.FIRST_TOUCH, + BreakdownAttributionType.LAST_TOUCH, + ] or self._filter.funnel_order_type == FunnelOrderType.UNORDERED ) first_entity = self._filter.entities[0] @@ -860,7 +877,11 @@ def _get_breakdown_conditions(self) -> Optional[str]: def _get_breakdown_prop(self, group_remaining=False) -> str: if self._filter.breakdown: other_aggregation = "['Other']" if self._query_has_array_breakdown() else "'Other'" - if group_remaining and self._filter.breakdown_type in ["person", "event", "group"]: + if group_remaining and self._filter.breakdown_type in [ + "person", + "event", + "group", + ]: return f", 
if(has(%(breakdown_values)s, prop), prop, {other_aggregation}) as prop" else: # Cohorts don't have "Other" aggregation diff --git a/posthog/queries/funnels/funnel.py b/posthog/queries/funnels/funnel.py index 79f0c69898214..e1ac23f00d637 100644 --- a/posthog/queries/funnels/funnel.py +++ b/posthog/queries/funnels/funnel.py @@ -111,7 +111,6 @@ def get_comparison_cols(self, level_index: int, max_steps: int): return ", ".join(cols) def build_step_subquery(self, level_index: int, max_steps: int, event_names_alias: str = "events"): - if level_index >= max_steps: return f""" SELECT diff --git a/posthog/queries/funnels/funnel_event_query.py b/posthog/queries/funnels/funnel_event_query.py index be41ec9116bf8..dad407abffa0d 100644 --- a/posthog/queries/funnels/funnel_event_query.py +++ b/posthog/queries/funnels/funnel_event_query.py @@ -21,7 +21,9 @@ def get_query( # Aggregating by group if self._filter.aggregation_group_type_index is not None: aggregation_target = get_aggregation_target_field( - self._filter.aggregation_group_type_index, self.EVENT_TABLE_ALIAS, self._person_id_alias + self._filter.aggregation_group_type_index, + self.EVENT_TABLE_ALIAS, + self._person_id_alias, ) # Aggregating by HogQL diff --git a/posthog/queries/funnels/funnel_persons.py b/posthog/queries/funnels/funnel_persons.py index 5153473857eba..5cebef5fb7dcd 100644 --- a/posthog/queries/funnels/funnel_persons.py +++ b/posthog/queries/funnels/funnel_persons.py @@ -15,7 +15,11 @@ class ClickhouseFunnelActors(ClickhouseFunnel, ActorBaseQuery): def aggregation_group_type_index(self): return self._filter.aggregation_group_type_index - def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None): + def actor_query( + self, + limit_actors: Optional[bool] = True, + extra_fields: Optional[List[str]] = None, + ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( FUNNEL_PERSONS_BY_STEP_SQL.format( diff --git 
a/posthog/queries/funnels/funnel_strict.py b/posthog/queries/funnels/funnel_strict.py index dd5c4db883437..38b5d3a4c6a09 100644 --- a/posthog/queries/funnels/funnel_strict.py +++ b/posthog/queries/funnels/funnel_strict.py @@ -18,7 +18,6 @@ def get_query(self): """ def get_step_counts_query(self): - steps_per_person_query = self.get_step_counts_without_aggregation_query() max_steps = len(self._filter.entities) breakdown_clause = self._get_breakdown_prop() diff --git a/posthog/queries/funnels/funnel_strict_persons.py b/posthog/queries/funnels/funnel_strict_persons.py index 716c27608eb3b..cca6f8e598dc8 100644 --- a/posthog/queries/funnels/funnel_strict_persons.py +++ b/posthog/queries/funnels/funnel_strict_persons.py @@ -15,7 +15,11 @@ class ClickhouseFunnelStrictActors(ClickhouseFunnelStrict, ActorBaseQuery): def aggregation_group_type_index(self): return self._filter.aggregation_group_type_index - def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None): + def actor_query( + self, + limit_actors: Optional[bool] = True, + extra_fields: Optional[List[str]] = None, + ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( FUNNEL_PERSONS_BY_STEP_SQL.format( diff --git a/posthog/queries/funnels/funnel_trends.py b/posthog/queries/funnels/funnel_trends.py index 3c6bbca8b79a1..d67b24ae78bbc 100644 --- a/posthog/queries/funnels/funnel_trends.py +++ b/posthog/queries/funnels/funnel_trends.py @@ -55,13 +55,11 @@ class ClickhouseFunnelTrends(ClickhouseFunnelBase): QUERY_TYPE = "funnel_trends" def __init__(self, filter: Filter, team: Team) -> None: - super().__init__(filter, team) self.funnel_order = get_funnel_order_class(filter)(filter, team) def _exec_query(self): - return self._summarize_data(super()._exec_query()) def get_step_counts_without_aggregation_query( @@ -97,7 +95,11 @@ def get_query(self) -> str: # Expects multiple rows for same person, first event time, steps taken. 
self.params.update(self.funnel_order.params) - reached_from_step_count_condition, reached_to_step_count_condition, _ = self.get_steps_reached_conditions() + ( + reached_from_step_count_condition, + reached_to_step_count_condition, + _, + ) = self.get_steps_reached_conditions() interval_func = get_interval_func_ch(self._filter.interval) if self._filter.date_from is None: @@ -157,10 +159,13 @@ def get_steps_reached_conditions(self) -> Tuple[str, str, str]: reached_to_step_count_condition = f"steps_completed >= {to_step+1}" # Those who dropped off did_not_reach_to_step_count_condition = f"{reached_from_step_count_condition} AND steps_completed < {to_step+1}" - return reached_from_step_count_condition, reached_to_step_count_condition, did_not_reach_to_step_count_condition + return ( + reached_from_step_count_condition, + reached_to_step_count_condition, + did_not_reach_to_step_count_condition, + ) def _summarize_data(self, results): - breakdown_clause = self._get_breakdown_prop() summary = [] @@ -185,7 +190,6 @@ def _summarize_data(self, results): return summary def _format_results(self, summary): - if self._filter.breakdown: grouper = lambda row: row["breakdown_value"] sorted_data = sorted(summary, key=grouper) diff --git a/posthog/queries/funnels/funnel_trends_persons.py b/posthog/queries/funnels/funnel_trends_persons.py index 46f2a9f1bf7df..0f4391eb041e4 100644 --- a/posthog/queries/funnels/funnel_trends_persons.py +++ b/posthog/queries/funnels/funnel_trends_persons.py @@ -50,7 +50,11 @@ def actor_query(self, limit_actors: Optional[bool] = True): # Expects multiple rows for same person, first event time, steps taken. 
self.params.update(self.funnel_order.params) - _, reached_to_step_count_condition, did_not_reach_to_step_count_condition = self.get_steps_reached_conditions() + ( + _, + reached_to_step_count_condition, + did_not_reach_to_step_count_condition, + ) = self.get_steps_reached_conditions() return ( FUNNEL_PERSONS_BY_STEP_SQL.format( diff --git a/posthog/queries/funnels/funnel_unordered.py b/posthog/queries/funnels/funnel_unordered.py index e72abdf40c220..ac3a6d939b09f 100644 --- a/posthog/queries/funnels/funnel_unordered.py +++ b/posthog/queries/funnels/funnel_unordered.py @@ -54,7 +54,6 @@ def _serialize_step( } def get_query(self): - max_steps = len(self._filter.entities) for exclusion in self._filter.exclusions: @@ -70,7 +69,6 @@ def get_query(self): """ def get_step_counts_query(self): - max_steps = len(self._filter.entities) union_query = self.get_step_counts_without_aggregation_query() @@ -140,7 +138,6 @@ def _get_step_times(self, max_steps: int): return f", {formatted}" if formatted else "" def get_sorting_condition(self, max_steps: int): - conditions = [] event_times_elements = [] diff --git a/posthog/queries/funnels/funnel_unordered_persons.py b/posthog/queries/funnels/funnel_unordered_persons.py index 972a8b9ec7f7c..334798c990208 100644 --- a/posthog/queries/funnels/funnel_unordered_persons.py +++ b/posthog/queries/funnels/funnel_unordered_persons.py @@ -22,7 +22,11 @@ def _get_funnel_person_step_events(self): return ", array() as matching_events" return "" - def actor_query(self, limit_actors: Optional[bool] = True, extra_fields: Optional[List[str]] = None): + def actor_query( + self, + limit_actors: Optional[bool] = True, + extra_fields: Optional[List[str]] = None, + ): extra_fields_string = ", ".join([self._get_timestamp_outer_select()] + (extra_fields or [])) return ( FUNNEL_PERSONS_BY_STEP_SQL.format( diff --git a/posthog/queries/funnels/test/breakdown_cases.py b/posthog/queries/funnels/test/breakdown_cases.py index 541b8dad1bfd1..273ce3b201601 100644 --- 
a/posthog/queries/funnels/test/breakdown_cases.py +++ b/posthog/queries/funnels/test/breakdown_cases.py @@ -8,7 +8,11 @@ from posthog.models.filters import Filter from posthog.queries.breakdown_props import ALL_USERS_COHORT_ID from posthog.queries.funnels.funnel_unordered import ClickhouseFunnelUnordered -from posthog.test.base import APIBaseTest, also_test_with_materialized_columns, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + also_test_with_materialized_columns, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -46,7 +50,10 @@ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: "breakdown": step.breakdown, "breakdown_value": step.breakdown, **( - {"action_id": None, "name": f"Completed {order+1} step{'s' if order > 0 else ''}"} + { + "action_id": None, + "name": f"Completed {order+1} step{'s' if order > 0 else ''}", + } if Funnel == ClickhouseFunnelUnordered else {} ), @@ -60,9 +67,12 @@ def funnel_result(step: FunnelStepResult, order: int) -> Dict[str, Any]: @also_test_with_materialized_columns(["$browser", "$browser_version"]) def test_funnel_step_multi_property_breakdown_event(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -79,36 +89,60 @@ def test_funnel_step_multi_property_breakdown_event(self): { "event": "sign up", "timestamp": datetime(2020, 1, 1, 12), - "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95}, + "properties": { + "key": "val", + "$browser": "Chrome", + "$browser_version": 95, + }, }, { "event": "play movie", "timestamp": datetime(2020, 1, 1, 13), - "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95}, + "properties": { + "key": 
"val", + "$browser": "Chrome", + "$browser_version": 95, + }, }, { "event": "buy", "timestamp": datetime(2020, 1, 1, 15), - "properties": {"key": "val", "$browser": "Chrome", "$browser_version": 95}, + "properties": { + "key": "val", + "$browser": "Chrome", + "$browser_version": 95, + }, }, ], "person2": [ { "event": "sign up", "timestamp": datetime(2020, 1, 2, 14), - "properties": {"key": "val", "$browser": "Safari", "$browser_version": 15}, + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 15, + }, }, { "event": "play movie", "timestamp": datetime(2020, 1, 2, 16), - "properties": {"key": "val", "$browser": "Safari", "$browser_version": 15}, + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 15, + }, }, ], "person3": [ { "event": "sign up", "timestamp": datetime(2020, 1, 2, 14), - "properties": {"key": "val", "$browser": "Safari", "$browser_version": 14}, + "properties": { + "key": "val", + "$browser": "Safari", + "$browser_version": 14, + }, } ], } @@ -126,7 +160,10 @@ def test_funnel_step_multi_property_breakdown_event(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "14"]), [people["person3"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Safari", "14"]), + [people["person3"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari", "14"]), []) self._assert_funnel_breakdown_result_is_correct( @@ -143,8 +180,14 @@ def test_funnel_step_multi_property_breakdown_event(self): FunnelStepResult(name="buy", breakdown=["Safari", "15"], count=0), ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "15"]), [people["person2"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari", "15"]), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Safari", "15"]), + [people["person2"].uuid], + ) + self.assertCountEqual( + 
self._get_actor_ids_at_step(filter, 2, ["Safari", "15"]), + [people["person2"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[2], @@ -166,14 +209,23 @@ def test_funnel_step_multi_property_breakdown_event(self): ), ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "95"]), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Chrome", "95"]), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Chrome", "95"]), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, ["Chrome", "95"]), + [people["person1"].uuid], + ) @also_test_with_materialized_columns(["$browser"]) def test_funnel_step_breakdown_event_with_string_only_breakdown(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -248,8 +300,14 @@ def test_funnel_step_breakdown_event_with_string_only_breakdown(self): ), ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], [ @@ -266,15 +324,22 @@ def test_funnel_step_breakdown_event_with_string_only_breakdown(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, "Safari"), + 
[people["person2"].uuid, people["person3"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Safari"), + [people["person2"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid]) @also_test_with_materialized_columns(["$browser"]) def test_funnel_step_breakdown_event(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -349,8 +414,14 @@ def test_funnel_step_breakdown_event(self): ), ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], @@ -368,15 +439,22 @@ def test_funnel_step_breakdown_event(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid, people["person3"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Safari"), + [people["person2"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid]) @also_test_with_materialized_columns(["$browser"]) def test_funnel_step_breakdown_event_with_other(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 
0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -391,16 +469,28 @@ def test_funnel_step_breakdown_event_with_other(self): events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 1, 15), "properties": {"$browser": "Chrome"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 1, 15), + "properties": {"$browser": "Chrome"}, + }, ], "person2": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Safari"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 2, 16), @@ -408,10 +498,18 @@ def test_funnel_step_breakdown_event_with_other(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}} + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Safari"}, + } ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "random"}} + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "random"}, + } ], "person5": [ { @@ -443,9 +541,13 @@ def test_funnel_step_breakdown_event_with_other(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid, people["person3"].uuid], + ) + self.assertCountEqual( + 
self._get_actor_ids_at_step(filter, 2, "Safari"), + [people["person2"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid]) self._assert_funnel_breakdown_result_is_correct( result[0], @@ -470,15 +572,25 @@ def test_funnel_step_breakdown_event_with_other(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 1, "Other"), - [people["person1"].uuid, people["person4"].uuid, people["person5"].uuid], + [ + people["person1"].uuid, + people["person4"].uuid, + people["person5"].uuid, + ], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Other"), + [people["person1"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Other"), [people["person1"].uuid]) @also_test_with_materialized_columns(["$browser"]) def test_funnel_step_breakdown_event_no_type(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -491,16 +603,28 @@ def test_funnel_step_breakdown_event_no_type(self): events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 1, 15), "properties": {"$browser": "Chrome"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 1, 15), + "properties": {"$browser": "Chrome"}, + }, ], "person2": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + 
"properties": {"$browser": "Safari"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 2, 16), @@ -508,7 +632,11 @@ def test_funnel_step_breakdown_event_no_type(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Safari"}} + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Safari"}, + } ], } @@ -537,8 +665,14 @@ def test_funnel_step_breakdown_event_no_type(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], @@ -556,15 +690,22 @@ def test_funnel_step_breakdown_event_no_type(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid, people["person3"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Safari"), + [people["person2"].uuid], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person2"].uuid]) @also_test_with_materialized_columns(person_properties=["$browser"]) def test_funnel_step_breakdown_person(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -576,8 +717,16 @@ def test_funnel_step_breakdown_person(self): filter = 
Filter(data=filters) funnel = Funnel(filter, self.team) - person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk, properties={"$browser": "Chrome"}) - person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk, properties={"$browser": "Safari"}) + person1 = _create_person( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"$browser": "Chrome"}, + ) + person2 = _create_person( + distinct_ids=["person2"], + team_id=self.team.pk, + properties={"$browser": "Safari"}, + ) peoples_journeys = { "person1": [ @@ -638,9 +787,12 @@ def test_funnel_step_breakdown_person(self): @also_test_with_materialized_columns(["some_breakdown_val"]) def test_funnel_step_breakdown_limit(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -684,9 +836,12 @@ def test_funnel_step_breakdown_limit(self): @also_test_with_materialized_columns(["some_breakdown_val"]) def test_funnel_step_custom_breakdown_limit_with_nulls(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -738,9 +893,12 @@ def test_funnel_step_custom_breakdown_limit_with_nulls(self): @also_test_with_materialized_columns(["some_breakdown_val"]) def test_funnel_step_custom_breakdown_limit_with_nulls_included(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], 
"insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -797,7 +955,6 @@ def test_funnel_step_custom_breakdown_limit_with_nulls_included(self): @also_test_with_materialized_columns(["$browser"]) def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self): - filters = { "events": [{"id": "sign up", "order": 0}], "insight": INSIGHT_FUNNELS, @@ -816,11 +973,27 @@ def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Safari"}}, - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "Safari"}, + }, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # mixed property type! 
- {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, ] } people = journeys_for(events_by_person, self.team) @@ -835,27 +1008,38 @@ def test_funnel_step_breakdown_event_single_person_multiple_breakdowns(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "0"), [people["person1"].uuid]) self._assert_funnel_breakdown_result_is_correct( - result[1], [FunnelStepResult(name="sign up", count=1, breakdown=["Chrome"])] + result[1], + [FunnelStepResult(name="sign up", count=1, breakdown=["Chrome"])], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( - result[2], [FunnelStepResult(name="sign up", count=1, breakdown=["Mac"])] + result[2], + [FunnelStepResult(name="sign up", count=1, breakdown=["Mac"])], ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Mac"), [people["person1"].uuid]) self._assert_funnel_breakdown_result_is_correct( - result[3], [FunnelStepResult(name="sign up", count=1, breakdown=["Safari"])] + result[3], + [FunnelStepResult(name="sign up", count=1, breakdown=["Safari"])], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person1"].uuid], + ) def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self): - filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -907,7 +1091,10 @@ def 
test_funnel_step_breakdown_event_single_person_events_with_multiple_properti ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Chrome"), []) self._assert_funnel_breakdown_result_is_correct( @@ -924,13 +1111,23 @@ def test_funnel_step_breakdown_event_single_person_events_with_multiple_properti ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person1"].uuid]) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, "Safari"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person1"].uuid], + ) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2, "Safari"), + [people["person1"].uuid], + ) @also_test_with_materialized_columns(person_properties=["key"], verify_no_jsonextract=False) def test_funnel_cohort_breakdown(self): # This caused some issues with SQL parsing - _create_person(distinct_ids=[f"person1"], team_id=self.team.pk, properties={"key": "value"}) + _create_person( + distinct_ids=[f"person1"], + team_id=self.team.pk, + properties={"key": "value"}, + ) people = journeys_for( {"person1": [{"event": "sign up", "timestamp": datetime(2020, 1, 2, 12)}]}, self.team, @@ -943,7 +1140,11 @@ def test_funnel_cohort_breakdown(self): groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}], ) filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -962,15 +1163,25 @@ def test_funnel_cohort_breakdown(self): self.assertEqual(result[0][0]["breakdown"], "all 
users") self.assertEqual(len(result[1]), 3) self.assertEqual(result[1][0]["breakdown"], "test_cohort") - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, cohort.pk), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, cohort.pk), + [people["person1"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, cohort.pk), []) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ALL_USERS_COHORT_ID), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ALL_USERS_COHORT_ID), + [people["person1"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ALL_USERS_COHORT_ID), []) # non array filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -985,11 +1196,13 @@ def test_funnel_cohort_breakdown(self): self.assertEqual(len(result[0]), 3) self.assertEqual(result[0][0]["breakdown"], "test_cohort") self.assertEqual(result[0][0]["breakdown_value"], cohort.pk) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, cohort.pk), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, cohort.pk), + [people["person1"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, cohort.pk), []) def test_basic_funnel_default_funnel_days_breakdown_event(self): - events_by_person = { "user_1": [ { @@ -1051,7 +1264,11 @@ def test_basic_funnel_default_funnel_days_breakdown_event(self): self._assert_funnel_breakdown_result_is_correct( result[0], [ - FunnelStepResult(name="user signed up", count=1, breakdown=["https://posthog.com/docs/x"]), + FunnelStepResult( + name="user signed up", + count=1, + breakdown=["https://posthog.com/docs/x"], + ), 
FunnelStepResult( name="paid", count=1, @@ -1139,7 +1356,6 @@ def test_basic_funnel_default_funnel_days_breakdown_action(self): ) def test_funnel_step_breakdown_with_first_touch_attribution(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1157,21 +1373,41 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # first touch means alakazam is disregarded - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], # no properties dude, represented by '' "person5": [ @@ -1191,7 +1427,11 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self): [ FunnelStepResult(name="sign up", breakdown=[""], count=1), FunnelStepResult( - name="buy", breakdown=[""], 
count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=[""], + count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1203,7 +1443,11 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self): [ FunnelStepResult(name="sign up", breakdown=["0"], count=1), FunnelStepResult( - name="buy", breakdown=["0"], count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=["0"], + count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1224,7 +1468,10 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[3], @@ -1256,10 +1503,12 @@ def test_funnel_step_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid], + ) def test_funnel_step_breakdown_with_last_touch_attribution(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1277,21 +1526,41 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}, + { + 
"event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # last touch means 0 is disregarded - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "Alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "Alakazam"}, + }, ], # no properties dude, represented by '' "person5": [ @@ -1311,7 +1580,11 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self): [ FunnelStepResult(name="sign up", breakdown=[""], count=1), FunnelStepResult( - name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=[""], + count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1332,7 +1605,10 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Alakazam"), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Alakazam"), + [people["person4"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[2], @@ -1348,7 +1624,10 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( 
result[3], @@ -1380,10 +1659,12 @@ def test_funnel_step_breakdown_with_last_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid], + ) def test_funnel_step_breakdown_with_step_attribution(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1402,21 +1683,41 @@ def test_funnel_step_breakdown_with_step_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -1431,7 
+1732,11 @@ def test_funnel_step_breakdown_with_step_attribution(self): [ FunnelStepResult(name="sign up", breakdown=[""], count=1), FunnelStepResult( - name="buy", breakdown=[""], count=1, average_conversion_time=86400, median_conversion_time=86400 + name="buy", + breakdown=[""], + count=1, + average_conversion_time=86400, + median_conversion_time=86400, ), ], ) @@ -1443,7 +1748,11 @@ def test_funnel_step_breakdown_with_step_attribution(self): [ FunnelStepResult(name="sign up", breakdown=["0"], count=1), FunnelStepResult( - name="buy", breakdown=["0"], count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=["0"], + count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1464,7 +1773,10 @@ def test_funnel_step_breakdown_with_step_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Chrome"), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Chrome"), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[3], @@ -1483,7 +1795,6 @@ def test_funnel_step_breakdown_with_step_attribution(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Mac"), [people["person3"].uuid]) def test_funnel_step_breakdown_with_step_one_attribution(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1502,21 +1813,41 @@ def test_funnel_step_breakdown_with_step_one_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), 
"properties": {"$browser": "Safari"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -1532,13 +1863,18 @@ def test_funnel_step_breakdown_with_step_one_attribution(self): [ FunnelStepResult(name="sign up", breakdown=[""], count=2), FunnelStepResult( - name="buy", breakdown=[""], count=2, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=[""], + count=2, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 1, ""), [people["person1"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 1, ""), + [people["person1"].uuid, people["person3"].uuid], ) self._assert_funnel_breakdown_result_is_correct( @@ -1555,7 +1891,10 @@ def test_funnel_step_breakdown_with_step_one_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "Safari"), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "Safari"), + [people["person2"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[2], @@ -1571,10 +1910,12 @@ def 
test_funnel_step_breakdown_with_step_one_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "alakazam"), + [people["person4"].uuid], + ) def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1608,8 +1949,16 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": "no-mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$version": "no-mac"}, + }, ], "person4": [ { @@ -1617,7 +1966,11 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0, "$version": 0}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], # no properties dude, represented by '' "person5": [ @@ -1646,7 +1999,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["", ""]), [people["person5"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["", ""]), + [people["person5"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], @@ -1661,7 +2017,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): ), ], ) - 
self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["0", "0"]), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["0", "0"]), + [people["person4"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[2], @@ -1677,7 +2036,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[3], @@ -1693,7 +2055,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), [people["person3"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), + [people["person3"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[4], @@ -1709,10 +2074,12 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]), + [people["person2"].uuid], + ) def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_funnel(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1746,7 +2113,11 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": 
"no-mac"}}, ], "person4": [ @@ -1784,7 +2155,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["", ""]), [people["person5"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["", ""]), + [people["person5"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[1], @@ -1793,7 +2167,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ FunnelStepResult(name="buy", breakdown=["0", "0"], count=0), ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["0", "0"]), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["0", "0"]), + [people["person4"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[2], @@ -1809,7 +2186,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), [people["person1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Chrome", "xyz"]), + [people["person1"].uuid], + ) self._assert_funnel_breakdown_result_is_correct( result[3], @@ -1819,7 +2199,10 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), [people["person3"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Mac", ""]), + [people["person3"].uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Mac", ""]), []) self._assert_funnel_breakdown_result_is_correct( @@ -1836,10 +2219,12 @@ def test_funnel_step_multiple_breakdown_with_first_touch_attribution_incomplete_ ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, ["Safari", "xyz"]), [people["person2"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, ["Safari", 
"xyz"]), + [people["person2"].uuid], + ) def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1858,7 +2243,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ @@ -1866,13 +2255,25 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}} ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)} ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -1889,7 +2290,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) [ FunnelStepResult(name="sign up", breakdown=[""], count=1), FunnelStepResult( - name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=[""], + 
count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1910,10 +2315,12 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "alakazam"), + [people["person4"].uuid], + ) def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_funnel(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "buy", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -1932,7 +2339,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ @@ -1940,13 +2351,25 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}} ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)} ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + 
}, ], } people = journeys_for(events_by_person, self.team) @@ -1963,7 +2386,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu [ FunnelStepResult(name="sign up", breakdown=[""], count=1), FunnelStepResult( - name="buy", breakdown=[""], count=1, average_conversion_time=3600, median_conversion_time=3600 + name="buy", + breakdown=[""], + count=1, + average_conversion_time=3600, + median_conversion_time=3600, ), ], ) @@ -1984,7 +2411,10 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1, "alakazam"), [people["person4"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1, "alakazam"), + [people["person4"].uuid], + ) @snapshot_clickhouse_queries def test_funnel_step_multiple_breakdown_snapshot(self): @@ -2023,8 +2453,16 @@ def test_funnel_step_multiple_breakdown_snapshot(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$version": "no-mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$version": "no-mac"}, + }, ], "person4": [ { @@ -2032,7 +2470,11 @@ def test_funnel_step_multiple_breakdown_snapshot(self): "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0, "$version": 0}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], # no properties dude, represented by '' "person5": [ @@ -2054,7 +2496,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self): filters = { "events": [ {"id": "sign up", "order": 0}, - {"id": "buy", "properties": 
[{"type": "event", "key": "$version", "value": "xyz"}], "order": 1}, + { + "id": "buy", + "properties": [{"type": "event", "key": "$version", "value": "xyz"}], + "order": 1, + }, ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", @@ -2076,7 +2522,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self): "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome", "$version": "xyz"}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "Chrome"}, + }, # discarded at step 1 because doesn't meet criteria ], "person2": [ @@ -2088,7 +2538,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, { "event": "buy", "timestamp": datetime(2020, 1, 2, 15), @@ -2108,7 +2562,10 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen(self): self.assertEqual(len(result), 4) - self.assertCountEqual([res[0]["breakdown"] for res in result], [["Mac"], ["Chrome"], ["Safari"], [""]]) + self.assertCountEqual( + [res[0]["breakdown"] for res in result], + [["Mac"], ["Chrome"], ["Safari"], [""]], + ) @snapshot_clickhouse_queries def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): @@ -2117,7 +2574,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): filters = { "events": [ {"id": "sign up", "order": 0}, - {"id": "buy", "properties": [{"type": "event", "key": "$version", "value": "xyz"}], "order": 1}, + { + "id": "buy", + "properties": [{"type": "event", "key": "$version", "value": "xyz"}], + "order": 1, + }, ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", @@ -2140,7 +2601,11 @@ def 
test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome", "$version": "xyz"}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "Chrome"}, + }, # discarded because doesn't meet criteria ], "person2": [ @@ -2152,7 +2617,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, { "event": "buy", "timestamp": datetime(2020, 1, 2, 15), @@ -2192,7 +2661,6 @@ def assert_funnel_results_equal(left: List[Dict[str, Any]], right: List[Dict[str """ def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - return [{**step, "converted_people_url": None, "dropped_people_url": None} for step in steps] assert len(left) == len(right) @@ -2204,5 +2672,8 @@ def _filter(steps: List[Dict[str, Any]]) -> List[Dict[str, Any]]: try: assert item[key] == other[key] except AssertionError as e: - e.args += (f"failed comparing ${key}", f'Got "{item[key]}" and "{other[key]}"') + e.args += ( + f"failed comparing ${key}", + f'Got "{item[key]}" and "{other[key]}"', + ) raise diff --git a/posthog/queries/funnels/test/conversion_time_cases.py b/posthog/queries/funnels/test/conversion_time_cases.py index 278dfd989a724..02e8167818373 100644 --- a/posthog/queries/funnels/test/conversion_time_cases.py +++ b/posthog/queries/funnels/test/conversion_time_cases.py @@ -34,14 +34,26 @@ def test_funnel_with_multiple_incomplete_tries(self): { "person1": [ # person1 completed funnel on 2021-05-01 - {"event": "user signed up", "timestamp": datetime(2021, 5, 1, 1)}, + { + "event": "user signed up", + "timestamp": datetime(2021, 5, 
1, 1), + }, {"event": "$pageview", "timestamp": datetime(2021, 5, 1, 2)}, - {"event": "something else", "timestamp": datetime(2021, 5, 1, 3)}, + { + "event": "something else", + "timestamp": datetime(2021, 5, 1, 3), + }, # person1 completed part of funnel on 2021-05-03 and took 2 hours to convert - {"event": "user signed up", "timestamp": datetime(2021, 5, 3, 4)}, + { + "event": "user signed up", + "timestamp": datetime(2021, 5, 3, 4), + }, {"event": "$pageview", "timestamp": datetime(2021, 5, 3, 5)}, # person1 completed part of funnel on 2021-05-04 and took 3 hours to convert - {"event": "user signed up", "timestamp": datetime(2021, 5, 4, 7)}, + { + "event": "user signed up", + "timestamp": datetime(2021, 5, 4, 7), + }, {"event": "$pageview", "timestamp": datetime(2021, 5, 4, 10)}, ] }, @@ -61,7 +73,11 @@ def test_funnel_with_multiple_incomplete_tries(self): def test_funnel_step_conversion_times(self): filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", @@ -121,13 +137,27 @@ def test_funnel_times_with_different_conversion_windows(self): people = journeys_for( { "stopped_after_signup1": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14)}, + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14), + }, {"event": "pageview", "timestamp": datetime(2020, 1, 2, 14, 5)}, ], - "stopped_after_signup2": [{"event": "user signed up", "timestamp": datetime(2020, 1, 2, 14, 3)}], + "stopped_after_signup2": [ + { + "event": "user signed up", + "timestamp": datetime(2020, 1, 2, 14, 3), + } + ], "stopped_after_signup3": [ - {"event": "user signed up", "timestamp": datetime(2020, 1, 2, 12)}, - {"event": "pageview", "timestamp": datetime(2020, 1, 2, 12, 15)}, + { + "event": "user signed up", 
+ "timestamp": datetime(2020, 1, 2, 12), + }, + { + "event": "pageview", + "timestamp": datetime(2020, 1, 2, 12, 15), + }, ], }, self.team, @@ -149,7 +179,10 @@ def test_funnel_times_with_different_conversion_windows(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 2), - [people["stopped_after_signup1"].uuid, people["stopped_after_signup3"].uuid], + [ + people["stopped_after_signup1"].uuid, + people["stopped_after_signup3"].uuid, + ], ) filter = filter.shallow_clone({"funnel_window_interval": 5, "funnel_window_interval_unit": "minute"}) @@ -171,6 +204,9 @@ def test_funnel_times_with_different_conversion_windows(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [people["stopped_after_signup1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [people["stopped_after_signup1"].uuid], + ) return TestFunnelConversionTime diff --git a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py index 098d51ddecddd..bb6673387b64d 100644 --- a/posthog/queries/funnels/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/funnels/test/test_breakdowns_by_current_url.py @@ -3,7 +3,11 @@ from posthog.models import Filter from posthog.queries.funnels import ClickhouseFunnel -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -16,13 +20,19 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com", "$pathname": ""}, + "properties": { + "$current_url": "https://example.com", + "$pathname": "", + }, }, # trailing question mark { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 2), - "properties": {"$current_url": "https://example.com?", 
"$pathname": "?"}, + "properties": { + "$current_url": "https://example.com?", + "$pathname": "?", + }, }, { "event": "terminate funnel", @@ -34,13 +44,19 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/", "$pathname": "/"}, + "properties": { + "$current_url": "https://example.com/", + "$pathname": "/", + }, }, # trailing hash { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 2), - "properties": {"$current_url": "https://example.com#", "$pathname": "#"}, + "properties": { + "$current_url": "https://example.com#", + "$pathname": "#", + }, }, { "event": "terminate funnel", @@ -52,7 +68,10 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home", "$pathname": "/home"}, + "properties": { + "$current_url": "https://example.com/home", + "$pathname": "/home", + }, }, { "event": "terminate funnel", @@ -64,19 +83,28 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home/", "$pathname": "/home/"}, + "properties": { + "$current_url": "https://example.com/home/", + "$pathname": "/home/", + }, }, # trailing hash { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 2), - "properties": {"$current_url": "https://example.com/home#", "$pathname": "/home#"}, + "properties": { + "$current_url": "https://example.com/home#", + "$pathname": "/home#", + }, }, # all the things { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 3), - "properties": {"$current_url": "https://example.com/home/?#", "$pathname": "/home/?#"}, + "properties": { + "$current_url": "https://example.com/home/?#", + "$pathname": "/home/?#", + }, }, { "event": "terminate funnel", @@ -92,7 +120,13 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}): Filter( data={ "events": [ - {"id": 
"watched movie", "name": "watched movie", "type": "events", "order": 0, **events_extra}, + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + **events_extra, + }, { "id": "terminate funnel", "name": "terminate funnel", @@ -115,12 +149,24 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}): @snapshot_clickhouse_queries def test_breakdown_by_pathname(self) -> None: - response = self._run({"breakdown": "$pathname", "breakdown_type": "event", "breakdown_normalize_url": True}) + response = self._run( + { + "breakdown": "$pathname", + "breakdown_type": "event", + "breakdown_normalize_url": True, + } + ) actual = [] for breakdown_value in response: for funnel_step in breakdown_value: - actual.append((funnel_step["name"], funnel_step["count"], funnel_step["breakdown"])) + actual.append( + ( + funnel_step["name"], + funnel_step["count"], + funnel_step["breakdown"], + ) + ) assert actual == [ ("watched movie", 2, ["/"]), @@ -131,12 +177,24 @@ def test_breakdown_by_pathname(self) -> None: @snapshot_clickhouse_queries def test_breakdown_by_current_url(self) -> None: - response = self._run({"breakdown": "$current_url", "breakdown_type": "event", "breakdown_normalize_url": True}) + response = self._run( + { + "breakdown": "$current_url", + "breakdown_type": "event", + "breakdown_normalize_url": True, + } + ) actual = [] for breakdown_value in response: for funnel_step in breakdown_value: - actual.append((funnel_step["name"], funnel_step["count"], funnel_step["breakdown"])) + actual.append( + ( + funnel_step["name"], + funnel_step["count"], + funnel_step["breakdown"], + ) + ) assert actual == [ ("watched movie", 2, ["https://example.com/home"]), diff --git a/posthog/queries/funnels/test/test_funnel.py b/posthog/queries/funnels/test/test_funnel.py index deddf642a4c50..334f0dc9c41c0 100644 --- a/posthog/queries/funnels/test/test_funnel.py +++ b/posthog/queries/funnels/test/test_funnel.py @@ -13,8 +13,13 @@ from posthog.models.filters 
import Filter from posthog.models.instance_setting import get_instance_setting from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelActors -from posthog.queries.funnels.test.breakdown_cases import assert_funnel_results_equal, funnel_breakdown_test_factory -from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory +from posthog.queries.funnels.test.breakdown_cases import ( + assert_funnel_results_equal, + funnel_breakdown_test_factory, +) +from posthog.queries.funnels.test.conversion_time_cases import ( + funnel_conversion_time_test_factory, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -36,12 +41,26 @@ def _create_action(**kwargs): return action -class TestFunnelBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_action, _create_person)): # type: ignore +class TestFunnelBreakdown( + ClickhouseTestMixin, + funnel_breakdown_test_factory( # type: ignore + ClickhouseFunnel, + ClickhouseFunnelActors, + _create_event, + _create_action, + _create_person, + ), +): maxDiff = None pass -class TestFunnelConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_person)): # type: ignore +class TestFunnelConversionTime( + ClickhouseTestMixin, + funnel_conversion_time_test_factory( # type: ignore + ClickhouseFunnel, ClickhouseFunnelActors, _create_event, _create_person + ), +): maxDiff = None pass @@ -96,10 +115,18 @@ def _single_step_funnel(self, properties=None, filters=None): def _basic_funnel(self, properties=None, filters=None): action_credit_card = Action.objects.create(team=self.team, name="paid") ActionStep.objects.create( - action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10" + action=action_credit_card, + event="$autocapture", + tag_name="button", + text="Pay $10", ) action_play_movie = 
Action.objects.create(team=self.team, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) if filters is None: filters = { @@ -157,7 +184,10 @@ def test_funnel_events(self): self._signup_event(distinct_id="stopped_after_pay") self._pay_event(distinct_id="stopped_after_pay") - person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk) + person_factory( + distinct_ids=["had_anonymous_id", "completed_movie"], + team_id=self.team.pk, + ) self._signup_event(distinct_id="had_anonymous_id") self._pay_event(distinct_id="completed_movie") self._movie_event(distinct_id="completed_movie") @@ -193,18 +223,30 @@ def test_funnel_events_with_person_on_events_v2(self): # events stopped_after_signup_person_id = uuid.uuid4() person_factory(distinct_ids=["stopped_after_signup"], team_id=self.team.pk) - self._signup_event(distinct_id="stopped_after_signup", person_id=stopped_after_signup_person_id) + self._signup_event( + distinct_id="stopped_after_signup", + person_id=stopped_after_signup_person_id, + ) with freeze_time("2012-01-01T03:21:36.000Z"): stopped_after_pay_person_id = uuid.uuid4() person_factory(distinct_ids=["stopped_after_pay"], team_id=self.team.pk) - self._signup_event(distinct_id="stopped_after_pay", person_id=stopped_after_pay_person_id) + self._signup_event( + distinct_id="stopped_after_pay", + person_id=stopped_after_pay_person_id, + ) with freeze_time("2012-01-01T03:21:37.000Z"): - self._pay_event(distinct_id="stopped_after_pay", person_id=stopped_after_pay_person_id) + self._pay_event( + distinct_id="stopped_after_pay", + person_id=stopped_after_pay_person_id, + ) with freeze_time("2012-01-01T03:21:38.000Z"): had_anonymous_id_person_id = uuid.uuid4() - person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk) + 
person_factory( + distinct_ids=["had_anonymous_id", "completed_movie"], + team_id=self.team.pk, + ) self._signup_event(distinct_id="had_anonymous_id", person_id=had_anonymous_id_person_id) with freeze_time("2012-01-01T03:21:39.000Z"): self._pay_event(distinct_id="completed_movie", person_id=had_anonymous_id_person_id) @@ -243,7 +285,12 @@ def test_funnel_events_with_person_on_events_v2(self): def test_funnel_with_messed_up_order(self): action_play_movie = Action.objects.create(team=self.team, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) funnel = self._basic_funnel( filters={ @@ -261,7 +308,10 @@ def test_funnel_with_messed_up_order(self): self._signup_event(distinct_id="stopped_after_pay") self._movie_event(distinct_id="completed_movie") - person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk) + person_factory( + distinct_ids=["had_anonymous_id", "completed_movie"], + team_id=self.team.pk, + ) self._signup_event(distinct_id="had_anonymous_id") self._movie_event(distinct_id="completed_movie") @@ -323,7 +373,12 @@ def test_funnel_with_any_event(self): def test_funnel_with_new_entities_that_mess_up_order(self): action_play_movie = Action.objects.create(team=self.team, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) funnel = self._basic_funnel( filters={ @@ -345,7 +400,10 @@ def test_funnel_with_new_entities_that_mess_up_order(self): self._signup_event(distinct_id="stopped_after_pay") self._movie_event(distinct_id="completed_movie") - person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk) + person_factory( + 
distinct_ids=["had_anonymous_id", "completed_movie"], + team_id=self.team.pk, + ) self._signup_event(distinct_id="had_anonymous_id") self._movie_event(distinct_id="completed_movie") @@ -405,10 +463,18 @@ def test_funnel_prop_filters(self): def test_funnel_prop_filters_per_entity(self): action_credit_card = Action.objects.create(team_id=self.team.pk, name="paid") ActionStep.objects.create( - action=action_credit_card, event="$autocapture", tag_name="button", text="Pay $10" + action=action_credit_card, + event="$autocapture", + tag_name="button", + text="Pay $10", ) action_play_movie = Action.objects.create(team_id=self.team.pk, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) filters = { "events": [ { @@ -417,7 +483,11 @@ def test_funnel_prop_filters_per_entity(self): "order": 0, "properties": [ {"key": "$browser", "value": "Safari"}, - {"key": "$browser", "operator": "is_not", "value": "Chrome"}, + { + "key": "$browser", + "operator": "is_not", + "value": "Chrome", + }, ], } ], @@ -440,7 +510,11 @@ def test_funnel_prop_filters_per_entity(self): funnel = self._basic_funnel(filters=filters) # events - person_factory(distinct_ids=["with_property"], team_id=self.team.pk, properties={"$browser": "Safari"}) + person_factory( + distinct_ids=["with_property"], + team_id=self.team.pk, + properties={"$browser": "Safari"}, + ) self._signup_event(distinct_id="with_property", properties={"$browser": "Safari"}) self._pay_event(distinct_id="with_property", properties={"$browser": "Safari"}) self._movie_event(distinct_id="with_property") @@ -466,17 +540,31 @@ def test_funnel_prop_filters_per_entity(self): def test_funnel_person_prop(self): action_credit_card = Action.objects.create(team_id=self.team.pk, name="paid") ActionStep.objects.create( - action=action_credit_card, 
event="$autocapture", tag_name="button", text="Pay $10" + action=action_credit_card, + event="$autocapture", + tag_name="button", + text="Pay $10", ) action_play_movie = Action.objects.create(team_id=self.team.pk, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) filters = { "events": [ { "id": "user signed up", "type": "events", "order": 0, - "properties": [{"key": "email", "value": "hello@posthog.com", "type": "person"}], + "properties": [ + { + "key": "email", + "value": "hello@posthog.com", + "type": "person", + } + ], } ], "actions": [ @@ -489,7 +577,9 @@ def test_funnel_person_prop(self): # events person_factory( - distinct_ids=["with_property"], team_id=self.team.pk, properties={"email": "hello@posthog.com"} + distinct_ids=["with_property"], + team_id=self.team.pk, + properties={"email": "hello@posthog.com"}, ) self._signup_event(distinct_id="with_property") self._pay_event(distinct_id="with_property") @@ -507,18 +597,34 @@ def test_funnel_multiple_actions(self): # This test prevents a regression person_factory(distinct_ids=["person1"], team_id=self.team.pk) event_factory(distinct_id="person1", event="event1", team=self.team) - event_factory(distinct_id="person1", event="event2", properties={"test_propX": "a"}, team=self.team) + event_factory( + distinct_id="person1", + event="event2", + properties={"test_propX": "a"}, + team=self.team, + ) action1 = Action.objects.create(team_id=self.team.pk, name="event2") - ActionStep.objects.create(action=action1, event="event2", properties=[{"key": "test_propX", "value": "a"}]) + ActionStep.objects.create( + action=action1, + event="event2", + properties=[{"key": "test_propX", "value": "a"}], + ) action2 = Action.objects.create(team_id=self.team.pk, name="event2") - ActionStep.objects.create(action=action2, event="event2", 
properties=[{"key": "test_propX", "value": "c"}]) + ActionStep.objects.create( + action=action2, + event="event2", + properties=[{"key": "test_propX", "value": "c"}], + ) result = Funnel( filter=Filter( data={ "events": [{"id": "event1", "order": 0}], - "actions": [{"id": action1.pk, "order": 1}, {"id": action2.pk, "order": 2}], + "actions": [ + {"id": action1.pk, "order": 1}, + {"id": action2.pk, "order": 2}, + ], "insight": INSIGHT_FUNNELS, "funnel_window_days": 14, } @@ -531,7 +637,11 @@ def test_funnel_multiple_actions(self): @also_test_with_materialized_columns(person_properties=["email"]) def test_funnel_filter_test_accounts(self): - person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"}) + person_factory( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"email": "test@posthog.com"}, + ) person_factory(distinct_ids=["person2"], team_id=self.team.pk) event_factory(distinct_id="person1", event="event1", team=self.team) event_factory(distinct_id="person2", event="event1", team=self.team) @@ -551,8 +661,16 @@ def test_funnel_filter_test_accounts(self): @also_test_with_materialized_columns(person_properties=["email"]) def test_funnel_with_entity_person_property_filters(self): - person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"}) - person_factory(distinct_ids=["person2"], team_id=self.team.pk, properties={"email": "another@example.com"}) + person_factory( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"email": "test@posthog.com"}, + ) + person_factory( + distinct_ids=["person2"], + team_id=self.team.pk, + properties={"email": "another@example.com"}, + ) person_factory(distinct_ids=["person3"], team_id=self.team.pk) event_factory(distinct_id="person1", event="event1", team=self.team) event_factory(distinct_id="person2", event="event1", team=self.team) @@ -566,7 +684,12 @@ def test_funnel_with_entity_person_property_filters(self): 
"id": "event1", "order": 0, "properties": [ - {"key": "email", "value": "is_set", "operator": "is_set", "type": "person"} + { + "key": "email", + "value": "is_set", + "operator": "is_set", + "type": "person", + } ], } ], @@ -580,8 +703,16 @@ def test_funnel_with_entity_person_property_filters(self): @also_test_with_materialized_columns(person_properties=["email"], verify_no_jsonextract=False) def test_funnel_filter_by_action_with_person_properties(self): - person_factory(distinct_ids=["person1"], team_id=self.team.pk, properties={"email": "test@posthog.com"}) - person_factory(distinct_ids=["person2"], team_id=self.team.pk, properties={"email": "another@example.com"}) + person_factory( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"email": "test@posthog.com"}, + ) + person_factory( + distinct_ids=["person2"], + team_id=self.team.pk, + properties={"email": "another@example.com"}, + ) person_factory(distinct_ids=["person3"], team_id=self.team.pk) event_factory(distinct_id="person1", event="event1", team=self.team) event_factory(distinct_id="person2", event="event1", team=self.team) @@ -591,7 +722,14 @@ def test_funnel_filter_by_action_with_person_properties(self): ActionStep.objects.create( action=action, event="event1", - properties=[{"key": "email", "value": "is_set", "operator": "is_set", "type": "person"}], + properties=[ + { + "key": "email", + "value": "is_set", + "operator": "is_set", + "type": "person", + } + ], ) result = Funnel( @@ -624,9 +762,17 @@ def test_basic_funnel_default_funnel_days(self): # event _create_person(distinct_ids=["user_1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_1", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="user_1", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", 
distinct_id="user_1", timestamp="2020-01-10T14:00:00Z") result = funnel.run() @@ -653,11 +799,23 @@ def test_basic_funnel_with_repeat_steps(self): person1_stopped_after_two_signups = _create_person( distinct_ids=["stopped_after_signup1"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + ) person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup2") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup2", + ) result = funnel.run() self.assertEqual(result[0]["name"], "user signed up") @@ -667,16 +825,27 @@ def test_basic_funnel_with_repeat_steps(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [person1_stopped_after_two_signups.uuid], + ) @also_test_with_materialized_columns(["key"]) def test_basic_funnel_with_derivative_steps(self): filters = { "events": [ - {"id": "user signed up", "type": "events", "order": 0, "properties": {"key": "val"}}, + { + "id": "user signed up", + "type": "events", + "order": 0, + "properties": {"key": "val"}, + }, {"id": "user signed up", "type": "events", "order": 1}, ], "insight": INSIGHT_FUNNELS, @@ -691,13 +860,23 @@ def 
test_basic_funnel_with_derivative_steps(self): distinct_ids=["stopped_after_signup1"], team_id=self.team.pk ) _create_event( - team=self.team, event="user signed up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1") person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="stopped_after_signup2", properties={"key": "val"} + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup2", + properties={"key": "val"}, ) result = funnel.run() @@ -708,15 +887,24 @@ def test_basic_funnel_with_derivative_steps(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [person1_stopped_after_two_signups.uuid], + ) def test_basic_funnel_with_repeat_step_updated_param(self): people = journeys_for( { - "stopped_after_signup1": [{"event": "user signed up"}, {"event": "user signed up"}], + "stopped_after_signup1": [ + {"event": "user signed up"}, + {"event": "user signed up"}, + ], "stopped_after_signup2": [{"event": "user signed up"}], }, self.team, @@ -743,10 +931,16 @@ def test_basic_funnel_with_repeat_step_updated_param(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [people["stopped_after_signup1"].uuid, people["stopped_after_signup2"].uuid], + [ + 
people["stopped_after_signup1"].uuid, + people["stopped_after_signup2"].uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [people["stopped_after_signup1"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [people["stopped_after_signup1"].uuid], + ) filters = { "events": [ @@ -792,7 +986,12 @@ def test_funnel_exclusions_full_window(self): "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-14 00:00:00", "exclusions": [ - {"id": "x 1 name with numbers 2", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1} + { + "id": "x 1 name with numbers 2", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } ], } filter = Filter(data=filters) @@ -801,26 +1000,53 @@ def test_funnel_exclusions_full_window(self): # event 1 person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", ) - _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00") # event 2 _create_person(distinct_ids=["person2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00" + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="x 1 name with numbers 2", + distinct_id="person2", + timestamp="2021-05-01 03:30:00", ) _create_event( - team=self.team, event="x 1 name with numbers 2", distinct_id="person2", timestamp="2021-05-01 03:30:00" + team=self.team, + event="paid", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", ) - _create_event(team=self.team, event="paid", 
distinct_id="person2", timestamp="2021-05-01 04:00:00") # event 3 person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00") result = funnel.run() self.assertEqual(len(result), 2) @@ -845,7 +1071,14 @@ def test_advanced_funnel_exclusions_between_steps(self): "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-14 00:00:00", "insight": INSIGHT_FUNNELS, - "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } + ], } person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) @@ -853,53 +1086,145 @@ def test_advanced_funnel_exclusions_between_steps(self): # this dude is discarded when funnel_from_step = 2 # this dude is discarded when funnel_from_step = 3 _create_event( - team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person1", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person1", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person1", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + 
event="invite teammate", + distinct_id="person1", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 03:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00" + team=self.team, + event="x", + distinct_id="person1", + timestamp="2021-05-01 05:30:00", ) - _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 04:30:00") _create_event( - team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person1", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00") person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk) # this dude is discarded when funnel_from_step = 2 # this dude is discarded when funnel_from_step = 3 _create_event( - team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person2", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person2", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00") 
_create_event( - team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00" + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 05:30:00", ) - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 04:30:00") _create_event( - team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person2", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00") person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) # this dude is discarded when funnel_from_step = 0 # this dude is discarded when funnel_from_step = 3 _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 01:30:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person3", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person3", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00" + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 
05:30:00", ) _create_event( - team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00") filter = Filter(data=filters) funnel = Funnel(filter, self.team) @@ -914,7 +1239,16 @@ def test_advanced_funnel_exclusions_between_steps(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person1.uuid, person2.uuid]) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 2}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 1, + "funnel_to_step": 2, + } + ] + } ) funnel = Funnel(filter, self.team) @@ -928,7 +1262,16 @@ def test_advanced_funnel_exclusions_between_steps(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person2.uuid, person3.uuid]) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 2, "funnel_to_step": 3}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 2, + "funnel_to_step": 3, + } + ] + } ) funnel = Funnel(filter, self.team) @@ -942,7 +1285,16 @@ def test_advanced_funnel_exclusions_between_steps(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person3.uuid]) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 3, "funnel_to_step": 4}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 3, + "funnel_to_step": 4, + } + ] + } ) funnel = Funnel(filter, self.team) @@ -957,7 +1309,16 @@ def test_advanced_funnel_exclusions_between_steps(self): #  bigger step window filter = filter.shallow_clone( - {"exclusions": [{"id": 
"x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 3}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 1, + "funnel_to_step": 3, + } + ] + } ) funnel = Funnel(filter, self.team) @@ -988,7 +1349,10 @@ def test_advanced_funnel_with_repeat_steps(self): people = journeys_for( { "stopped_after_signup1": [{"event": "user signed up"}], - "stopped_after_pageview1": [{"event": "user signed up"}, {"event": "$pageview"}], + "stopped_after_pageview1": [ + {"event": "user signed up"}, + {"event": "$pageview"}, + ], "stopped_after_pageview2": [ {"event": "user signed up"}, {"event": "$pageview"}, @@ -1062,11 +1426,17 @@ def test_advanced_funnel_with_repeat_steps(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 4), - [people["stopped_after_pageview3"].uuid, people["stopped_after_pageview4"].uuid], - ) - - self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), [people["stopped_after_pageview4"].uuid]) - + [ + people["stopped_after_pageview3"].uuid, + people["stopped_after_pageview4"].uuid, + ], + ) + + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 5), + [people["stopped_after_pageview4"].uuid], + ) + def test_advanced_funnel_with_repeat_steps_out_of_order_events(self): filters = { "events": [ @@ -1088,45 +1458,85 @@ def test_advanced_funnel_with_repeat_steps_out_of_order_events(self): distinct_ids=["random", "stopped_after_signup1"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="random") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + ) person2_stopped_after_one_pageview = _create_person( distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1") + _create_event( + team=self.team, + event="user signed up", + 
distinct_id="stopped_after_pageview1", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1") person3_stopped_after_two_pageview = _create_person( distinct_ids=["stopped_after_pageview2"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview2") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview2") - _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview2") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview2", + ) + _create_event( + team=self.team, + event="blaah blaa", + distinct_id="stopped_after_pageview2", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview2") person4_stopped_after_three_pageview = _create_person( distinct_ids=["stopped_after_pageview3"], team_id=self.team.pk ) - _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview3") + _create_event( + team=self.team, + event="blaah blaa", + distinct_id="stopped_after_pageview3", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3") - _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview3") + _create_event( + team=self.team, + event="blaah blaa", + distinct_id="stopped_after_pageview3", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview3") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview3", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview3") person5_stopped_after_many_pageview = _create_person( distinct_ids=["stopped_after_pageview4"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", 
distinct_id="stopped_after_pageview4") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview4", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4") - _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview4") + _create_event( + team=self.team, + event="blaah blaa", + distinct_id="stopped_after_pageview4", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview4") _create_person(distinct_ids=["stopped_after_pageview5"], team_id=self.team.pk) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5") - _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_pageview5") + _create_event( + team=self.team, + event="blaah blaa", + distinct_id="stopped_after_pageview5", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview5") @@ -1168,19 +1578,34 @@ def test_advanced_funnel_with_repeat_steps_out_of_order_events(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person5_stopped_after_many_pageview.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 3), + [person5_stopped_after_many_pageview.uuid], + ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person5_stopped_after_many_pageview.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 4), + [person5_stopped_after_many_pageview.uuid], + ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), [person5_stopped_after_many_pageview.uuid]) + self.assertCountEqual( + 
self._get_actor_ids_at_step(filter, 5), + [person5_stopped_after_many_pageview.uuid], + ) @also_test_with_materialized_columns(["key"]) def test_funnel_with_actions(self): - sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}], + properties=[ + { + "key": "key", + "type": "event", + "value": ["val"], + "operator": "exact", + } + ], ) filters = { @@ -1199,15 +1624,24 @@ def test_funnel_with_actions(self): distinct_ids=["stopped_after_signup1"], team_id=self.team.pk ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) person2_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup2"], team_id=self.team.pk) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup2", + properties={"key": "val"}, ) result = funnel.run() @@ -1220,17 +1654,29 @@ def test_funnel_with_actions(self): # check ordering of people in first step self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [person1_stopped_after_two_signups.uuid], + ) def test_funnel_with_different_actions_at_same_time_count_as_converted(self): - 
sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}], + properties=[ + { + "key": "key", + "type": "event", + "value": ["val"], + "operator": "exact", + } + ], ) filters = { @@ -1250,17 +1696,26 @@ def test_funnel_with_different_actions_at_same_time_count_as_converted(self): distinct_ids=["stopped_after_signup1"], team_id=self.team.pk ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="$pageview", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) person2_stopped_after_signup = _create_person( distinct_ids=["stopped_after_signup2"], team_id=self.team.pk ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup2", + properties={"key": "val"}, ) result = funnel.run() @@ -1273,16 +1728,29 @@ def test_funnel_with_different_actions_at_same_time_count_as_converted(self): # check ordering of people in first step self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [person1_stopped_after_two_signups.uuid], + ) def test_funnel_with_actions_and_props(self): sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "email", "operator": "icontains", 
"value": ".com", "type": "person"}], + properties=[ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], ) filters = { @@ -1298,20 +1766,33 @@ def test_funnel_with_actions_and_props(self): # event person1_stopped_after_two_signups = _create_person( - distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"email": "fake@test.com"} + distinct_ids=["stopped_after_signup1"], + team_id=self.team.pk, + properties={"email": "fake@test.com"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) person2_stopped_after_signup = _create_person( - distinct_ids=["stopped_after_signup2"], team_id=self.team.pk, properties={"email": "fake@test.com"} + distinct_ids=["stopped_after_signup2"], + team_id=self.team.pk, + properties={"email": "fake@test.com"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup2", + properties={"key": "val"}, ) result = funnel.run() @@ -1324,13 +1805,18 @@ def test_funnel_with_actions_and_props(self): # check ordering of people in first step self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + 
[person1_stopped_after_two_signups.uuid], + ) def test_funnel_with_actions_and_props_with_zero_person_ids(self): - # only a person-on-event test if not get_instance_setting("PERSON_ON_EVENTS_ENABLED"): return True @@ -1338,7 +1824,14 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self): sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}], + properties=[ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ], ) filters = { @@ -1354,20 +1847,33 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self): # event person1_stopped_after_two_signups = _create_person( - distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"email": "fake@test.com"} + distinct_ids=["stopped_after_signup1"], + team_id=self.team.pk, + properties={"email": "fake@test.com"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup1", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup1", + properties={"key": "val"}, ) person2_stopped_after_signup = _create_person( - distinct_ids=["stopped_after_signup2"], team_id=self.team.pk, properties={"email": "fake@test.com"} + distinct_ids=["stopped_after_signup2"], + team_id=self.team.pk, + properties={"email": "fake@test.com"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_signup2", properties={"key": "val"} + team=self.team, + event="sign up", + distinct_id="stopped_after_signup2", + properties={"key": "val"}, ) _create_event( @@ -1395,19 +1901,31 @@ def test_funnel_with_actions_and_props_with_zero_person_ids(self): # check ordering of 
people in first step self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 2), + [person1_stopped_after_two_signups.uuid], + ) @also_test_with_materialized_columns(["key"]) @skip("Flaky funnel test") def test_funnel_with_actions_and_events(self): - sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}], + properties=[ + { + "key": "key", + "type": "event", + "value": ["val"], + "operator": "exact", + } + ], ) filters = { @@ -1479,7 +1997,10 @@ def test_funnel_with_actions_and_events(self): person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 00:00:07" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 00:00:07", ) _create_event( team=self.team, @@ -1489,7 +2010,10 @@ def test_funnel_with_actions_and_events(self): timestamp="2021-05-01 00:00:08", ) _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 00:00:09" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 00:00:09", ) _create_event( team=self.team, @@ -1501,7 +2025,10 @@ def test_funnel_with_actions_and_events(self): person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 00:00:11" + team=self.team, + event="user signed up", + distinct_id="person4", + timestamp="2021-05-01 00:00:11", ) _create_event( 
team=self.team, @@ -1511,7 +2038,10 @@ def test_funnel_with_actions_and_events(self): timestamp="2021-05-01 00:00:12", ) _create_event( - team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 00:00:13" + team=self.team, + event="user signed up", + distinct_id="person4", + timestamp="2021-05-01 00:00:13", ) _create_person(distinct_ids=["person5"], team_id=self.team.pk) @@ -1535,33 +2065,58 @@ def test_funnel_with_actions_and_events(self): # check ordering of people in steps self.assertCountEqual( self._get_actor_ids_at_step(filter, 1), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid, person4.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + person3.uuid, + person4.uuid, + ], ) self.assertCountEqual( self._get_actor_ids_at_step(filter, 2), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid, person4.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + person3.uuid, + person4.uuid, + ], ) self.assertCountEqual( self._get_actor_ids_at_step(filter, 3), - [person1_stopped_after_two_signups.uuid, person2_stopped_after_signup.uuid, person3.uuid], + [ + person1_stopped_after_two_signups.uuid, + person2_stopped_after_signup.uuid, + person3.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person1_stopped_after_two_signups.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 4), + [person1_stopped_after_two_signups.uuid], + ) @also_test_with_materialized_columns(["$current_url"]) def test_funnel_with_matching_properties(self): filters = { "events": [ {"id": "user signed up", "order": 0}, - {"id": "$pageview", "order": 1, "properties": {"$current_url": "aloha.com"}}, + { + "id": "$pageview", + "order": 1, + "properties": {"$current_url": "aloha.com"}, + }, { "id": "$pageview", "order": 2, "properties": {"$current_url": "aloha2.com"}, }, # 
different event to above - {"id": "$pageview", "order": 3, "properties": {"$current_url": "aloha2.com"}}, + { + "id": "$pageview", + "order": 3, + "properties": {"$current_url": "aloha2.com"}, + }, {"id": "$pageview", "order": 4}, ], "insight": INSIGHT_FUNNELS, @@ -1577,28 +2132,61 @@ def test_funnel_with_matching_properties(self): "stopped_after_signup1": [{"event": "user signed up"}], "stopped_after_pageview1": [ {"event": "user signed up"}, - {"event": "$pageview", "properties": {"$current_url": "aloha.com"}}, + { + "event": "$pageview", + "properties": {"$current_url": "aloha.com"}, + }, ], "stopped_after_pageview2": [ {"event": "user signed up"}, - {"event": "$pageview", "properties": {"$current_url": "aloha.com"}}, - {"event": "blaah blaa", "properties": {"$current_url": "aloha.com"}}, - {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}}, + { + "event": "$pageview", + "properties": {"$current_url": "aloha.com"}, + }, + { + "event": "blaah blaa", + "properties": {"$current_url": "aloha.com"}, + }, + { + "event": "$pageview", + "properties": {"$current_url": "aloha2.com"}, + }, ], "stopped_after_pageview3": [ {"event": "user signed up"}, - {"event": "$pageview", "properties": {"$current_url": "aloha.com"}}, - {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}}, - {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}}, + { + "event": "$pageview", + "properties": {"$current_url": "aloha.com"}, + }, + { + "event": "$pageview", + "properties": {"$current_url": "aloha2.com"}, + }, + { + "event": "$pageview", + "properties": {"$current_url": "aloha2.com"}, + }, {"event": "blaah blaa"}, ], "stopped_after_pageview4": [ {"event": "user signed up"}, - {"event": "$pageview", "properties": {"$current_url": "aloha.com"}}, + { + "event": "$pageview", + "properties": {"$current_url": "aloha.com"}, + }, {"event": "blaah blaa"}, - {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}}, - {"event": "$pageview", 
"properties": {"$current_url": "aloha.com"}}, - {"event": "$pageview", "properties": {"$current_url": "aloha2.com"}}, + { + "event": "$pageview", + "properties": {"$current_url": "aloha2.com"}, + }, + { + "event": "$pageview", + "properties": {"$current_url": "aloha.com"}, + }, + { + "event": "$pageview", + "properties": {"$current_url": "aloha2.com"}, + }, ], }, self.team, @@ -1647,7 +2235,10 @@ def test_funnel_with_matching_properties(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 4), - [people["stopped_after_pageview3"].uuid, people["stopped_after_pageview4"].uuid], + [ + people["stopped_after_pageview3"].uuid, + people["stopped_after_pageview4"].uuid, + ], ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 5), []) @@ -1658,19 +2249,31 @@ def test_funnel_conversion_window(self): person = _create_person(distinct_ids=[f"user_{i}"], team=self.team) ids_to_compare.append(str(person.uuid)) _create_event( - event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00" + event="step one", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:00", ) _create_event( - event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-02 00:00:00" + event="step two", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-02 00:00:00", ) for i in range(10, 25): _create_person(distinct_ids=[f"user_{i}"], team=self.team) _create_event( - event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00" + event="step one", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:00", ) _create_event( - event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-10 00:00:00" + event="step two", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-10 00:00:00", ) data = { @@ -1693,7 +2296,10 @@ def test_funnel_conversion_window(self): self.assertEqual(results[1]["count"], 10) 
self.assertEqual(results[2]["count"], 0) - self.assertCountEqual([str(id) for id in self._get_actor_ids_at_step(filter, 2)], ids_to_compare) + self.assertCountEqual( + [str(id) for id in self._get_actor_ids_at_step(filter, 2)], + ids_to_compare, + ) @snapshot_clickhouse_queries def test_funnel_conversion_window_seconds(self): @@ -1702,19 +2308,31 @@ def test_funnel_conversion_window_seconds(self): person = _create_person(distinct_ids=[f"user_{i}"], team=self.team) ids_to_compare.append(str(person.uuid)) _create_event( - event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00" + event="step one", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:00", ) _create_event( - event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:10" + event="step two", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:10", ) for i in range(10, 25): _create_person(distinct_ids=[f"user_{i}"], team=self.team) _create_event( - event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00" + event="step one", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:00", ) _create_event( - event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:20" + event="step two", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:20", ) data = { @@ -1737,7 +2355,10 @@ def test_funnel_conversion_window_seconds(self): self.assertEqual(results[1]["count"], 10) self.assertEqual(results[2]["count"], 0) - self.assertCountEqual([str(id) for id in self._get_actor_ids_at_step(filter, 2)], ids_to_compare) + self.assertCountEqual( + [str(id) for id in self._get_actor_ids_at_step(filter, 2)], + ids_to_compare, + ) def test_funnel_exclusions_invalid_params(self): filters = { @@ -1749,23 +2370,57 @@ def test_funnel_exclusions_invalid_params(self): "funnel_window_days": 14, "date_from": "2021-05-01 
00:00:00", "date_to": "2021-05-14 00:00:00", - "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 1, + "funnel_to_step": 1, + } + ], } filter = Filter(data=filters) self.assertRaises(ValidationError, lambda: Funnel(filter, self.team)) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 2}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 1, + "funnel_to_step": 2, + } + ] + } ) self.assertRaises(ValidationError, lambda: Funnel(filter, self.team)) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 2, "funnel_to_step": 1}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 2, + "funnel_to_step": 1, + } + ] + } ) self.assertRaises(ValidationError, lambda: Funnel(filter, self.team)) filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 2}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 2, + } + ] + } ) self.assertRaises(ValidationError, lambda: Funnel(filter, self.team)) @@ -1779,7 +2434,14 @@ def test_funnel_exclusion_no_end_event(self): "funnel_window_days": 1, "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-14 00:00:00", - "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } + ], } filter = Filter(data=filters) funnel = Funnel(filter, self.team) @@ -1787,32 +2449,69 @@ def test_funnel_exclusion_no_end_event(self): # event 1 person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 
01:00:00" + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", ) - _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00") # event 2 _create_person(distinct_ids=["person2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00" + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 03:30:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) + + # event 3 + _create_person(distinct_ids=["person3"], team_id=self.team.pk) + # should be discarded, even if nothing happened after x, since within conversion window + _create_event( + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 03:30:00") - _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00") - - # event 3 - _create_person(distinct_ids=["person3"], team_id=self.team.pk) - # should be discarded, even if nothing happened after x, since within conversion window _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00" + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 06:00:00") # event 4 - outside conversion window person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", 
distinct_id="person4", timestamp="2021-05-01 07:00:00" + team=self.team, + event="user signed up", + distinct_id="person4", + timestamp="2021-05-01 07:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person4", + timestamp="2021-05-02 08:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person4", timestamp="2021-05-02 08:00:00") result = funnel.run() self.assertEqual(len(result), 2) @@ -1827,11 +2526,17 @@ def test_funnel_exclusion_no_end_event(self): @also_test_with_materialized_columns(["key"]) def test_funnel_exclusions_with_actions(self): - sign_up_action = _create_action( name="sign up", team=self.team, - properties=[{"key": "key", "type": "event", "value": ["val"], "operator": "exact"}], + properties=[ + { + "key": "key", + "type": "event", + "value": ["val"], + "operator": "exact", + } + ], ) filters = { @@ -1844,7 +2549,12 @@ def test_funnel_exclusions_with_actions(self): "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-14 00:00:00", "exclusions": [ - {"id": sign_up_action.id, "type": "actions", "funnel_from_step": 0, "funnel_to_step": 1} + { + "id": sign_up_action.id, + "type": "actions", + "funnel_from_step": 0, + "funnel_to_step": 1, + } ], } filter = Filter(data=filters) @@ -1853,14 +2563,25 @@ def test_funnel_exclusions_with_actions(self): # event 1 person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", ) - _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00") # event 2 _create_person(distinct_ids=["person2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person2", 
timestamp="2021-05-01 03:00:00" + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 03:00:00", ) _create_event( team=self.team, @@ -1869,14 +2590,27 @@ def test_funnel_exclusions_with_actions(self): properties={"key": "val"}, timestamp="2021-05-01 03:30:00", ) - _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00") + _create_event( + team=self.team, + event="paid", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) # event 3 person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00") result = funnel.run() self.assertEqual(len(result), 2) @@ -1919,7 +2653,12 @@ def test_funnel_with_denormalised_properties(self): timestamp="2020-01-02T14:00:00Z", properties={"test_prop": "hi"}, ) - _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z") + _create_event( + team=self.team, + event="paid", + distinct_id="user_1", + timestamp="2020-01-10T14:00:00Z", + ) result = funnel.run() @@ -1939,69 +2678,190 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): "date_to": "2021-05-14 00:00:00", "insight": INSIGHT_FUNNELS, "exclusions": [ - {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}, - {"id": "y", "type": "events", "funnel_from_step": 2, "funnel_to_step": 3}, + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + }, + { + "id": "y", + "type": "events", + "funnel_from_step": 2, + "funnel_to_step": 3, + }, ], } 
_create_person(distinct_ids=["person1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person1", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person1", + timestamp="2021-05-01 04:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 03:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00" + team=self.team, + event="y", + distinct_id="person1", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person1", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="y", distinct_id="person1", timestamp="2021-05-01 04:30:00") _create_event( - team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person1", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00") _create_person(distinct_ids=["person2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person2", + timestamp="2021-05-01 01:30:00", + ) + 
_create_event( + team=self.team, + event="$pageview", + distinct_id="person2", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person2", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person2", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00" + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 05:30:00", ) - _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 04:30:00") _create_event( - team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person2", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00") _create_person(distinct_ids=["person3"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 01:30:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person3", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + 
distinct_id="person3", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00" + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 05:30:00", ) _create_event( - team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00" + team=self.team, + event="pageview2", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00") person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 01:00:00" + team=self.team, + event="user signed up", + distinct_id="person4", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person4", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person4", + timestamp="2021-05-01 04:00:00", ) - _create_event(team=self.team, event="$pageview", distinct_id="person4", timestamp="2021-05-01 02:00:00") _create_event( - team=self.team, event="insight viewed", distinct_id="person4", timestamp="2021-05-01 04:00:00" + team=self.team, + event="invite teammate", + distinct_id="person4", + timestamp="2021-05-01 05:00:00", ) _create_event( - team=self.team, event="invite teammate", distinct_id="person4", timestamp="2021-05-01 05:00:00" + 
team=self.team, + event="pageview2", + distinct_id="person4", + timestamp="2021-05-01 06:00:00", ) - _create_event(team=self.team, event="pageview2", distinct_id="person4", timestamp="2021-05-01 06:00:00") filter = Filter(data=filters) funnel = Funnel(filter, self.team) @@ -2018,8 +2878,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): filter = filter.shallow_clone( { "exclusions": [ - {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}, - {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}, + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + }, + { + "id": "y", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + }, ] } ) @@ -2037,8 +2907,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): filter = filter.shallow_clone( { "exclusions": [ - {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}, - {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}, + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + }, + { + "id": "y", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + }, ] } ) @@ -2056,8 +2936,18 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): filter = filter.shallow_clone( { "exclusions": [ - {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4}, - {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4}, + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 4, + }, + { + "id": "y", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 4, + }, ] } ) @@ -2095,7 +2985,12 @@ def test_funnel_with_elements_chain(self): "name": "$autocapture", "order": 1, "properties": [ - {"key": "tag_name", "value": [tag_name], "operator": "exact", "type": "element"} + { + "key": "tag_name", + "value": [tag_name], + "operator": "exact", + "type": "element", + 
} ], "type": "events", }, @@ -2133,22 +3028,52 @@ def test_breakdown_values_is_set_on_the_query_with_fewer_than_two_entities(self) @snapshot_clickhouse_queries def test_funnel_with_cohorts_step_filter(self): - - _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"}) + _create_person( + distinct_ids=["user_1"], + team_id=self.team.pk, + properties={"email": "n@test.com"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="user_1", + timestamp="2020-01-02T14:00:00Z", + ) _create_event( - team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id="user_1", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z") _create_person(distinct_ids=["user_2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_2", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="user_2", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z") cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) filters = { @@ -2176,22 +3101,52 @@ def test_funnel_with_cohorts_step_filter(self): @snapshot_clickhouse_queries def test_funnel_with_precalculated_cohort_step_filter(self): - - _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"}) + _create_person( + distinct_ids=["user_1"], + team_id=self.team.pk, + 
properties={"email": "n@test.com"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="user_1", + timestamp="2020-01-02T14:00:00Z", + ) _create_event( - team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id="user_1", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z") _create_person(distinct_ids=["user_2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_2", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="user_2", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z") cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "email", "operator": "icontains", "value": ".com", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + } + ] + } + ], ) filters = { @@ -2200,7 +3155,13 @@ def test_funnel_with_precalculated_cohort_step_filter(self): "id": "user signed up", "type": "events", "order": 0, - "properties": [{"type": "precalculated-cohort", "key": "id", "value": cohort.pk}], + "properties": [ + { + "type": "precalculated-cohort", + "key": "id", + "value": cohort.pk, + } + ], }, {"id": "paid", "type": "events", "order": 1}, ], @@ -2222,18 +3183,37 @@ def test_funnel_with_precalculated_cohort_step_filter(self): @snapshot_clickhouse_queries def test_funnel_with_static_cohort_step_filter(self): - - _create_person(distinct_ids=["user_1"], team_id=self.team.pk, properties={"email": "n@test.com"}) + _create_person( + distinct_ids=["user_1"], + team_id=self.team.pk, + 
properties={"email": "n@test.com"}, + ) + _create_event( + team=self.team, + event="user signed up", + distinct_id="user_1", + timestamp="2020-01-02T14:00:00Z", + ) _create_event( - team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="paid", + distinct_id="user_1", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_1", timestamp="2020-01-10T14:00:00Z") _create_person(distinct_ids=["user_2"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="user_2", timestamp="2020-01-02T14:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_2", + timestamp="2020-01-02T14:00:00Z", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="user_2", + timestamp="2020-01-10T14:00:00Z", ) - _create_event(team=self.team, event="paid", distinct_id="user_2", timestamp="2020-01-10T14:00:00Z") cohort = Cohort.objects.create(team=self.team, groups=[], is_static=True) cohort.insert_users_by_list(["user_2", "rando"]) @@ -2269,7 +3249,11 @@ def test_funnel_with_property_groups(self): "date_to": "2020-07-01 00:00:00", "events": [ {"id": "user signed up", "order": 0}, - {"id": "$pageview", "order": 1, "properties": {"$current_url": "aloha.com"}}, + { + "id": "$pageview", + "order": 1, + "properties": {"$current_url": "aloha.com"}, + }, { "id": "$pageview", "order": 2, @@ -2284,15 +3268,35 @@ def test_funnel_with_property_groups(self): { "type": "AND", "values": [ - {"key": "email", "operator": "icontains", "value": ".com", "type": "person"}, - {"key": "age", "operator": "exact", "value": "20", "type": "person"}, + { + "key": "email", + "operator": "icontains", + "value": ".com", + "type": "person", + }, + { + "key": "age", + "operator": "exact", + "value": "20", + "type": "person", + }, ], }, { "type": "OR", "values": [ - {"key": "email", "operator": "icontains", "value": ".org", "type": "person"}, - 
{"key": "age", "operator": "exact", "value": "28", "type": "person"}, + { + "key": "email", + "operator": "icontains", + "value": ".org", + "type": "person", + }, + { + "key": "age", + "operator": "exact", + "value": "28", + "type": "person", + }, ], }, ], @@ -2332,10 +3336,23 @@ def test_funnel_with_property_groups(self): # event journeys_for( { - "stopped_after_signup1": [{"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}], - "stopped_after_pageview1": [{"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}], + "stopped_after_signup1": [ + { + "event": "user signed up", + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "stopped_after_pageview1": [ + { + "event": "user signed up", + "timestamp": datetime(2020, 5, 1, 0), + } + ], "stopped_after_pageview2": [ - {"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}, + { + "event": "user signed up", + "timestamp": datetime(2020, 5, 1, 0), + }, { "event": "$pageview", "properties": {"$current_url": "aloha.com"}, @@ -2343,7 +3360,10 @@ def test_funnel_with_property_groups(self): }, ], "stopped_after_pageview3": [ - {"event": "user signed up", "timestamp": datetime(2020, 5, 1, 0)}, + { + "event": "user signed up", + "timestamp": datetime(2020, 5, 1, 0), + }, { "event": "$pageview", "properties": {"$current_url": "aloha.com"}, @@ -2398,10 +3418,16 @@ def test_funnel_with_property_groups(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 2), - [people["stopped_after_pageview2"].uuid, people["stopped_after_pageview3"].uuid], + [ + people["stopped_after_pageview2"].uuid, + people["stopped_after_pageview3"].uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [people["stopped_after_pageview3"].uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 3), + [people["stopped_after_pageview3"].uuid], + ) @snapshot_clickhouse_queries def test_timezones(self): @@ -2425,7 +3451,10 @@ def test_timezones(self): 
_create_person(distinct_ids=["user_1"], team_id=self.team.pk) #  this event shouldn't appear as in US/Pacific this would be the previous day _create_event( - team=self.team, event="user signed up", distinct_id="user_1", timestamp="2020-01-01T01:00:00Z" + team=self.team, + event="user signed up", + distinct_id="user_1", + timestamp="2020-01-01T01:00:00Z", ) result = funnel.run() @@ -2434,7 +3463,12 @@ def test_timezones(self): def test_funnel_with_sampling(self): action_play_movie = Action.objects.create(team=self.team, name="watched movie") - ActionStep.objects.create(action=action_play_movie, event="$autocapture", tag_name="a", href="/movie") + ActionStep.objects.create( + action=action_play_movie, + event="$autocapture", + tag_name="a", + href="/movie", + ) funnel = self._basic_funnel( filters={ @@ -2453,7 +3487,10 @@ def test_funnel_with_sampling(self): self._signup_event(distinct_id="stopped_after_pay") self._movie_event(distinct_id="completed_movie") - person_factory(distinct_ids=["had_anonymous_id", "completed_movie"], team_id=self.team.pk) + person_factory( + distinct_ids=["had_anonymous_id", "completed_movie"], + team_id=self.team.pk, + ) self._signup_event(distinct_id="had_anonymous_id") self._movie_event(distinct_id="completed_movie") @@ -2511,7 +3548,10 @@ def test_hogql_aggregation(self): # properties.$session_id result = self._basic_funnel( - filters={**basic_filters, "funnel_aggregate_by_hogql": "properties.$session_id"} + filters={ + **basic_filters, + "funnel_aggregate_by_hogql": "properties.$session_id", + } ).run() self.assertEqual(result[0]["count"], 3) self.assertEqual(result[1]["count"], 2) @@ -2544,14 +3584,27 @@ def test_funnel_all_events_with_properties(self): filters = { "events": [ - {"type": "events", "id": "user signed up", "order": 0, "name": "user signed up", "math": "total"}, + { + "type": "events", + "id": "user signed up", + "order": 0, + "name": "user signed up", + "math": "total", + }, { "type": "events", "id": None, "order": 1, 
"name": "All events", "math": "total", - "properties": [{"key": "is_saved", "value": ["true"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "is_saved", + "value": ["true"], + "operator": "exact", + "type": "event", + } + ], }, ], "funnel_window_days": 14, diff --git a/posthog/queries/funnels/test/test_funnel_persons.py b/posthog/queries/funnels/test/test_funnel_persons.py index 87517e2bd1bad..46881af6d5b1d 100644 --- a/posthog/queries/funnels/test/test_funnel_persons.py +++ b/posthog/queries/funnels/test/test_funnel_persons.py @@ -9,7 +9,9 @@ from posthog.models.event.util import bulk_create_events from posthog.models.person.util import bulk_create_persons from posthog.queries.funnels.funnel_persons import ClickhouseFunnelActors -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -33,10 +35,20 @@ def _create_sample_data_multiple_dropoffs(self): events = [] for i in range(5): events.append( - {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"} + { + "event": "step one", + "distinct_id": f"user_{i}", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + } ) events.append( - {"event": "step two", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-03 00:00:00"} + { + "event": "step two", + "distinct_id": f"user_{i}", + "team": self.team, + "timestamp": "2021-05-03 00:00:00", + } ) events.append( { @@ -49,21 +61,44 @@ def _create_sample_data_multiple_dropoffs(self): for i in range(5, 15): events.append( - {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"} + { + "event": "step one", + "distinct_id": f"user_{i}", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + } ) events.append( - {"event": 
"step two", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-03 00:00:00"} + { + "event": "step two", + "distinct_id": f"user_{i}", + "team": self.team, + "timestamp": "2021-05-03 00:00:00", + } ) for i in range(15, 35): events.append( - {"event": "step one", "distinct_id": f"user_{i}", "team": self.team, "timestamp": "2021-05-01 00:00:00"} + { + "event": "step one", + "distinct_id": f"user_{i}", + "team": self.team, + "timestamp": "2021-05-01 00:00:00", + } ) bulk_create_events(events) def _create_browser_breakdown_events(self): - person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk, properties={"$country": "PL"}) - person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk, properties={"$country": "EE"}) + person1 = _create_person( + distinct_ids=["person1"], + team_id=self.team.pk, + properties={"$country": "PL"}, + ) + person2 = _create_person( + distinct_ids=["person2"], + team_id=self.team.pk, + properties={"$country": "EE"}, + ) journeys_for( { "person1": [ @@ -181,9 +216,24 @@ def test_last_step_dropoff(self): def _create_sample_data(self): for i in range(110): _create_person(distinct_ids=[f"user_{i}"], team=self.team) - _create_event(event="step one", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-01 00:00:00") - _create_event(event="step two", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-03 00:00:00") - _create_event(event="step three", distinct_id=f"user_{i}", team=self.team, timestamp="2021-05-05 00:00:00") + _create_event( + event="step one", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-01 00:00:00", + ) + _create_event( + event="step two", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-03 00:00:00", + ) + _create_event( + event="step three", + distinct_id=f"user_{i}", + team=self.team, + timestamp="2021-05-05 00:00:00", + ) def test_basic_offset(self): self._create_sample_data() @@ -306,7 +356,11 @@ def 
test_first_step_breakdowns(self): "interval": "day", "funnel_window_days": 7, "funnel_step": 1, - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "breakdown_type": "event", "breakdown": "$browser", } @@ -337,7 +391,11 @@ def test_first_step_breakdowns_with_multi_property_breakdown(self): "interval": "day", "funnel_window_days": 7, "funnel_step": 1, - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "breakdown_type": "event", "breakdown": ["$browser", "$browser_version"], } @@ -368,7 +426,11 @@ def test_first_step_breakdown_person(self): "interval": "day", "funnel_window_days": 7, "funnel_step": 1, - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "breakdown_type": "person", "breakdown": "$country", } @@ -384,7 +446,8 @@ def test_first_step_breakdown_person(self): # Check custom_steps give same answers for breakdowns _, custom_step_results, _ = ClickhouseFunnelActors( - filter.shallow_clone({"funnel_step_breakdown": "EE", "funnel_custom_steps": [1, 2, 3]}), self.team + filter.shallow_clone({"funnel_step_breakdown": "EE", "funnel_custom_steps": [1, 2, 3]}), + self.team, ).get_actors() self.assertEqual(results, custom_step_results) @@ -395,7 +458,8 @@ def test_first_step_breakdown_person(self): # Check custom_steps give same answers for breakdowns _, custom_step_results, _ = ClickhouseFunnelActors( - filter.shallow_clone({"funnel_step_breakdown": "PL", "funnel_custom_steps": [1, 2, 3]}), self.team + filter.shallow_clone({"funnel_step_breakdown": "PL", 
"funnel_custom_steps": [1, 2, 3]}), + self.team, ).get_actors() self.assertEqual(results, custom_step_results) @@ -403,7 +467,11 @@ def test_first_step_breakdown_person(self): def test_funnel_cohort_breakdown_persons(self): person = _create_person(distinct_ids=[f"person1"], team_id=self.team.pk, properties={"key": "value"}) _create_event( - team=self.team, event="sign up", distinct_id=f"person1", properties={}, timestamp="2020-01-02T12:00:00Z" + team=self.team, + event="sign up", + distinct_id=f"person1", + properties={}, + timestamp="2020-01-02T12:00:00Z", ) cohort = Cohort.objects.create( team=self.team, @@ -411,7 +479,11 @@ def test_funnel_cohort_breakdown_persons(self): groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}], ) filters = { - "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}, {"id": "buy", "order": 2}], + "events": [ + {"id": "sign up", "order": 0}, + {"id": "play movie", "order": 1}, + {"id": "buy", "order": 2}, + ], "insight": INSIGHT_FUNNELS, "date_from": "2020-01-01", "date_to": "2020-01-08", diff --git a/posthog/queries/funnels/test/test_funnel_strict.py b/posthog/queries/funnels/test/test_funnel_strict.py index 8cc43e176a0e0..0f0d4b691ce21 100644 --- a/posthog/queries/funnels/test/test_funnel_strict.py +++ b/posthog/queries/funnels/test/test_funnel_strict.py @@ -7,9 +7,19 @@ from posthog.models.instance_setting import override_instance_config from posthog.queries.funnels.funnel_strict import ClickhouseFunnelStrict from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors -from posthog.queries.funnels.test.breakdown_cases import assert_funnel_results_equal, funnel_breakdown_test_factory -from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person +from posthog.queries.funnels.test.breakdown_cases import ( + 
assert_funnel_results_equal, + funnel_breakdown_test_factory, +) +from posthog.queries.funnels.test.conversion_time_cases import ( + funnel_conversion_time_test_factory, +) +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + _create_person, +) from posthog.test.test_journeys import journeys_for FORMAT_TIME = "%Y-%m-%d 00:00:00" @@ -24,8 +34,16 @@ def _create_action(**kwargs): return action -class TestFunnelStrictStepsBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnelStrict, ClickhouseFunnelStrictActors, _create_event, _create_action, _create_person)): # type: ignore - +class TestFunnelStrictStepsBreakdown( + ClickhouseTestMixin, + funnel_breakdown_test_factory( # type: ignore + ClickhouseFunnelStrict, + ClickhouseFunnelStrictActors, + _create_event, + _create_action, + _create_person, + ), +): maxDiff = None def test_basic_funnel_default_funnel_days_breakdown_event(self): @@ -39,7 +57,6 @@ def test_basic_funnel_default_funnel_days_breakdown_action_materialized(self): pass def test_strict_breakdown_events_with_multiple_properties(self): - filters = { "events": [{"id": "sign up", "order": 0}, {"id": "play movie", "order": 1}], "insight": INSIGHT_FUNNELS, @@ -56,8 +73,16 @@ def test_strict_breakdown_events_with_multiple_properties(self): people = journeys_for( { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, - {"event": "blah", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "blah", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "Chrome"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 1, 14), @@ -65,7 +90,11 @@ def test_strict_breakdown_events_with_multiple_properties(self): }, ], "person2": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 
2, 13), "properties": {"$browser": "Safari"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, { "event": "play movie", "timestamp": datetime(2020, 1, 2, 14), @@ -146,14 +175,20 @@ def test_strict_breakdown_events_with_multiple_properties(self): self.assertCountEqual(self._get_actor_ids_at_step(filter, 2, ["Safari"]), [people["person2"].uuid]) -class TestFunnelStrictStepsConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnelStrict, ClickhouseFunnelStrictActors, _create_event, _create_person)): # type: ignore - +class TestFunnelStrictStepsConversionTime( + ClickhouseTestMixin, + funnel_conversion_time_test_factory( # type: ignore + ClickhouseFunnelStrict, + ClickhouseFunnelStrictActors, + _create_event, + _create_person, + ), +): maxDiff = None pass class TestFunnelStrictSteps(ClickhouseTestMixin, APIBaseTest): - maxDiff = None def _get_actor_ids_at_step(self, filter, funnel_step, breakdown_value=None): @@ -177,7 +212,9 @@ def test_basic_strict_funnel(self): funnel = ClickhouseFunnelStrict(filter, self.team) person1_stopped_after_signup = _create_person( - distinct_ids=["stopped_after_signup1"], team_id=self.team.pk, properties={"test": "okay"} + distinct_ids=["stopped_after_signup1"], + team_id=self.team.pk, + properties={"test": "okay"}, ) _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_signup1") @@ -185,32 +222,60 @@ def test_basic_strict_funnel(self): distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview1", + ) person3_stopped_after_insight_view = _create_person( distinct_ids=["stopped_after_insightview"], team_id=self.team.pk ) - 
_create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview", + ) person4_stopped_after_insight_view_not_strict_order = _create_person( distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk ) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview2", + ) _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview2") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview2") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview2", + ) person5_stopped_after_insight_view_random = _create_person( distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview3", + ) _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview3") _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="insight viewed", 
distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview3", + ) person6 = _create_person(distinct_ids=["person6"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", distinct_id="person6") @@ -226,7 +291,11 @@ def test_basic_strict_funnel(self): _create_event(team=self.team, event="blaah blaa", distinct_id="person7") _create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview6", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6") result = funnel.run() @@ -250,7 +319,8 @@ def test_basic_strict_funnel(self): ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 2), [person3_stopped_after_insight_view.uuid, person7.uuid] + self._get_actor_ids_at_step(filter, 2), + [person3_stopped_after_insight_view.uuid, person7.uuid], ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person7.uuid]) @@ -263,7 +333,6 @@ def test_basic_strict_funnel(self): self.assertEqual(result[0]["count"], 7) def test_advanced_strict_funnel(self): - sign_up_action = _create_action( name="sign up", team=self.team, @@ -297,59 +366,122 @@ def test_advanced_strict_funnel(self): person2_stopped_after_one_pageview = _create_person( distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview1", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1") person3_stopped_after_insight_view = _create_person( distinct_ids=["stopped_after_insightview"], team_id=self.team.pk ) - 
_create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview") _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_insightview", properties={"key": "val"} + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview", + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="stopped_after_insightview", + properties={"key": "val"}, ) _create_event( - team=self.team, event="sign up", distinct_id="stopped_after_insightview", properties={"key": "val2"} + team=self.team, + event="sign up", + distinct_id="stopped_after_insightview", + properties={"key": "val2"}, ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview", + ) person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", distinct_id="person4") _create_event(team=self.team, event="user signed up", distinct_id="person4") - _create_event(team=self.team, event="sign up", distinct_id="person4", properties={"key": "val"}) - _create_event(team=self.team, event="$pageview", distinct_id="person4", properties={"key": "val"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="person4", + properties={"key": "val"}, + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person4", + properties={"key": "val"}, + ) _create_event(team=self.team, event="blaah blaa", distinct_id="person4") person5 = _create_person(distinct_ids=["person5"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", distinct_id="person5") _create_event(team=self.team, event="user signed up", distinct_id="person5") - 
_create_event(team=self.team, event="sign up", distinct_id="person5", properties={"key": "val"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="person5", + properties={"key": "val"}, + ) _create_event(team=self.team, event="$pageview", distinct_id="person5") _create_event(team=self.team, event="blaah blaa", distinct_id="person5") person6 = _create_person(distinct_ids=["person6"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", distinct_id="person6") _create_event(team=self.team, event="user signed up", distinct_id="person6") - _create_event(team=self.team, event="sign up", distinct_id="person6", properties={"key": "val"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="person6", + properties={"key": "val"}, + ) _create_event(team=self.team, event="$pageview", distinct_id="person6") - _create_event(team=self.team, event="pageview", distinct_id="person6", properties={"key": "val1"}) + _create_event( + team=self.team, + event="pageview", + distinct_id="person6", + properties={"key": "val1"}, + ) person7 = _create_person(distinct_ids=["person7"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", distinct_id="person7") _create_event(team=self.team, event="user signed up", distinct_id="person7") - _create_event(team=self.team, event="sign up", distinct_id="person7", properties={"key": "val"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="person7", + properties={"key": "val"}, + ) _create_event(team=self.team, event="$pageview", distinct_id="person7") _create_event(team=self.team, event="user signed up", distinct_id="person7") - _create_event(team=self.team, event="pageview", distinct_id="person7", properties={"key": "val"}) + _create_event( + team=self.team, + event="pageview", + distinct_id="person7", + properties={"key": "val"}, + ) person8 = _create_person(distinct_ids=["person8"], team_id=self.team.pk) _create_event(team=self.team, event="blaah blaa", 
distinct_id="person8") _create_event(team=self.team, event="user signed up", distinct_id="person8") _create_event(team=self.team, event="user signed up", distinct_id="person8") - _create_event(team=self.team, event="sign up", distinct_id="person8", properties={"key": "val"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="person8", + properties={"key": "val"}, + ) _create_event(team=self.team, event="$pageview", distinct_id="person8") - _create_event(team=self.team, event="pageview", distinct_id="person8", properties={"key": "val"}) + _create_event( + team=self.team, + event="pageview", + distinct_id="person8", + properties={"key": "val"}, + ) result = funnel.run() @@ -410,7 +542,10 @@ def test_basic_strict_funnel_conversion_times(self): person1_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00" + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + timestamp="2021-05-02 00:00:00", ) person2_stopped_after_one_pageview = _create_person( @@ -423,7 +558,10 @@ def test_basic_strict_funnel_conversion_times(self): timestamp="2021-05-02 00:00:00", ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 01:00:00" + team=self.team, + event="$pageview", + distinct_id="stopped_after_pageview1", + timestamp="2021-05-02 01:00:00", ) person3_stopped_after_insight_view = _create_person( @@ -436,7 +574,10 @@ def test_basic_strict_funnel_conversion_times(self): timestamp="2021-05-02 00:00:00", ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-02 02:00:00" + team=self.team, + event="$pageview", + distinct_id="stopped_after_insightview", + timestamp="2021-05-02 02:00:00", ) _create_event( team=self.team, @@ -469,7 +610,13 @@ def 
test_basic_strict_funnel_conversion_times(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 2), - [person2_stopped_after_one_pageview.uuid, person3_stopped_after_insight_view.uuid], + [ + person2_stopped_after_one_pageview.uuid, + person3_stopped_after_insight_view.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person3_stopped_after_insight_view.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 3), + [person3_stopped_after_insight_view.uuid], + ) diff --git a/posthog/queries/funnels/test/test_funnel_strict_persons.py b/posthog/queries/funnels/test/test_funnel_strict_persons.py index 9c9a304a59e8f..7b76faf42a54a 100644 --- a/posthog/queries/funnels/test/test_funnel_strict_persons.py +++ b/posthog/queries/funnels/test/test_funnel_strict_persons.py @@ -7,7 +7,9 @@ from posthog.constants import INSIGHT_FUNNELS from posthog.models.filters import Filter from posthog.queries.funnels.funnel_strict_persons import ClickhouseFunnelStrictActors -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, diff --git a/posthog/queries/funnels/test/test_funnel_time_to_convert.py b/posthog/queries/funnels/test/test_funnel_time_to_convert.py index 514bb5af66473..dba62ca133ae3 100644 --- a/posthog/queries/funnels/test/test_funnel_time_to_convert.py +++ b/posthog/queries/funnels/test/test_funnel_time_to_convert.py @@ -22,17 +22,52 @@ def test_auto_bin_count_single_step(self): _create_person(distinct_ids=["user b"], team=self.team) _create_person(distinct_ids=["user c"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") + _create_event( + 
event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) # Converted from 0 to 1 in 3600 s - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) # Converted from 0 to 1 in 2200 s - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step two", + distinct_id="user c", + team=self.team, + timestamp="2021-06-12 06:00:00", + ) # Converted from 0 to 1 in 82_800 s filter = Filter( @@ -60,9 +95,18 @@ def test_auto_bin_count_single_step(self): results, { "bins": [ - (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B - (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users - (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C + ( + 2220.0, + 2, + ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B + ( + 42510.0, + 0, + ), # Analogous to above, just an 
interval (in this case 26_880 s) up - no users + ( + 82800.0, + 1, + ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C ], "average_conversion_time": 29_540, }, @@ -75,17 +119,52 @@ def test_auto_bin_count_single_step_duplicate_events(self): _create_person(distinct_ids=["user b"], team=self.team) _create_person(distinct_ids=["user c"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) # Converted from 0 to 1 in 3600 s - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) # Converted from 0 to 1 in 2200 s - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + 
timestamp="2021-06-12 06:00:00", + ) # Converted from 0 to 1 in 82_800 s filter = Filter( @@ -113,9 +192,18 @@ def test_auto_bin_count_single_step_duplicate_events(self): results, { "bins": [ - (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B - (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users - (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C + ( + 2220.0, + 2, + ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B + ( + 42510.0, + 0, + ), # Analogous to above, just an interval (in this case 26_880 s) up - no users + ( + 82800.0, + 1, + ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C ], "average_conversion_time": 29_540, }, @@ -126,17 +214,52 @@ def test_custom_bin_count_single_step(self): _create_person(distinct_ids=["user b"], team=self.team) _create_person(distinct_ids=["user c"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) # Converted from 0 to 1 in 3600 s - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + 
distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) # Converted from 0 to 1 in 2200 s - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step two", + distinct_id="user c", + team=self.team, + timestamp="2021-06-12 06:00:00", + ) # Converted from 0 to 1 in 82_800 s filter = Filter( @@ -165,13 +288,22 @@ def test_custom_bin_count_single_step(self): results, { "bins": [ - (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 13_732 s - users A and B - (13732.0, 0), # Analogous to above, just an interval (in this case 13_732 s) up - no users + ( + 2220.0, + 2, + ), # Reached step 1 from step 0 in at least 2200 s but less than 13_732 s - users A and B + ( + 13732.0, + 0, + ), # Analogous to above, just an interval (in this case 13_732 s) up - no users (25244.0, 0), # And so on (36756.0, 0), (48268.0, 0), (59780.0, 0), - (71292.0, 1), # Reached step 1 from step 0 in at least 71_292 s but less than 82_804 s - user C + ( + 71292.0, + 1, + ), # Reached step 1 from step 0 in at least 71_292 s but less than 82_804 s - user C (82804.0, 0), ], "average_conversion_time": 29_540, @@ -184,16 +316,51 @@ def test_auto_bin_count_total(self): _create_person(distinct_ids=["user b"], team=self.team) _create_person(distinct_ids=["user c"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") - _create_event(event="step three", distinct_id="user a", 
team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) # Converted from 0 to 2 in 10_800 s - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step two", + distinct_id="user c", + team=self.team, + timestamp="2021-06-12 06:00:00", + ) filter = Filter( data={ @@ -217,8 +384,14 @@ def test_auto_bin_count_total(self): results, { "bins": [ - (10800.0, 1), # Reached step 2 from step 0 in at least 10_800 s but less than 10_860 s - user A - (10860.0, 0), # Analogous to above, just an interval (in this case 60 s) up - no users + ( + 10800.0, + 1, + ), # Reached step 2 from step 0 in at least 10_800 s but less than 10_860 s - user A + ( + 10860.0, + 0, + ), # Analogous to above, just an interval (in this case 60 s) up - no users ], "average_conversion_time": 10_800.0, }, @@ -226,7 +399,8 @@ def test_auto_bin_count_total(self): # Let's verify that behavior with steps unspecified is the same as when 
first and last steps specified funnel_trends_steps_specified = ClickhouseFunnelTimeToConvert( - Filter(data={**filter._data, "funnel_from_step": 0, "funnel_to_step": 2}), self.team + Filter(data={**filter._data, "funnel_from_step": 0, "funnel_to_step": 2}), + self.team, ) results_steps_specified = funnel_trends_steps_specified.run() @@ -238,17 +412,52 @@ def test_basic_unordered(self): _create_person(distinct_ids=["user b"], team=self.team) _create_person(distinct_ids=["user c"], team=self.team) - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) # Converted from 0 to 1 in 7200 s - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) # Converted from 0 to 1 in 2200 s - _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step two", + distinct_id="user c", + team=self.team, + 
timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-12 06:00:00", + ) # Converted from 0 to 1 in 82_800 s filter = Filter( @@ -278,9 +487,18 @@ def test_basic_unordered(self): results, { "bins": [ - (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B - (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users - (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C + ( + 2220.0, + 2, + ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B + ( + 42510.0, + 0, + ), # Analogous to above, just an interval (in this case 26_880 s) up - no users + ( + 82800.0, + 1, + ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C ], "average_conversion_time": 29540, }, @@ -293,25 +511,85 @@ def test_basic_strict(self): _create_person(distinct_ids=["user c"], team=self.team) _create_person(distinct_ids=["user d"], team=self.team) - _create_event(event="step one", distinct_id="user a", team=self.team, timestamp="2021-06-08 18:00:00") - _create_event(event="step two", distinct_id="user a", team=self.team, timestamp="2021-06-08 19:00:00") + _create_event( + event="step one", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 18:00:00", + ) + _create_event( + event="step two", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 19:00:00", + ) # Converted from 0 to 1 in 3600 s - _create_event(event="step three", distinct_id="user a", team=self.team, timestamp="2021-06-08 21:00:00") + _create_event( + event="step three", + distinct_id="user a", + team=self.team, + timestamp="2021-06-08 21:00:00", + ) - _create_event(event="step one", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:00:00") - _create_event(event="step two", distinct_id="user b", team=self.team, 
timestamp="2021-06-09 13:37:00") + _create_event( + event="step one", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:00:00", + ) + _create_event( + event="step two", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:37:00", + ) # Converted from 0 to 1 in 2200 s - _create_event(event="blah", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:38:00") - _create_event(event="step three", distinct_id="user b", team=self.team, timestamp="2021-06-09 13:39:00") + _create_event( + event="blah", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:38:00", + ) + _create_event( + event="step three", + distinct_id="user b", + team=self.team, + timestamp="2021-06-09 13:39:00", + ) - _create_event(event="step one", distinct_id="user c", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="step two", distinct_id="user c", team=self.team, timestamp="2021-06-12 06:00:00") + _create_event( + event="step one", + distinct_id="user c", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="step two", + distinct_id="user c", + team=self.team, + timestamp="2021-06-12 06:00:00", + ) # Converted from 0 to 1 in 82_800 s - _create_event(event="step one", distinct_id="user d", team=self.team, timestamp="2021-06-11 07:00:00") - _create_event(event="blah", distinct_id="user d", team=self.team, timestamp="2021-06-12 07:00:00") + _create_event( + event="step one", + distinct_id="user d", + team=self.team, + timestamp="2021-06-11 07:00:00", + ) + _create_event( + event="blah", + distinct_id="user d", + team=self.team, + timestamp="2021-06-12 07:00:00", + ) # Blah cancels conversion - _create_event(event="step two", distinct_id="user d", team=self.team, timestamp="2021-06-12 09:00:00") + _create_event( + event="step two", + distinct_id="user d", + team=self.team, + timestamp="2021-06-12 09:00:00", + ) filter = Filter( data={ @@ -340,9 +618,18 @@ def test_basic_strict(self): results, { 
"bins": [ - (2220.0, 2), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B - (42510.0, 0), # Analogous to above, just an interval (in this case 26_880 s) up - no users - (82800.0, 1), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C + ( + 2220.0, + 2, + ), # Reached step 1 from step 0 in at least 2200 s but less than 29_080 s - users A and B + ( + 42510.0, + 0, + ), # Analogous to above, just an interval (in this case 26_880 s) up - no users + ( + 82800.0, + 1, + ), # Reached step 1 from step 0 in at least 82_800 s but less than 109_680 s - user C ], "average_conversion_time": 29540, }, diff --git a/posthog/queries/funnels/test/test_funnel_trends.py b/posthog/queries/funnels/test/test_funnel_trends.py index 12e8b81af02a5..537333ce07476 100644 --- a/posthog/queries/funnels/test/test_funnel_trends.py +++ b/posthog/queries/funnels/test/test_funnel_trends.py @@ -8,7 +8,12 @@ from posthog.models.filters import Filter from posthog.queries.funnels.funnel_trends import ClickhouseFunnelTrends from posthog.queries.funnels.funnel_trends_persons import ClickhouseFunnelTrendsActors -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_person, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_person, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for FORMAT_TIME = "%Y-%m-%d %H:%M:%S" @@ -60,7 +65,10 @@ def _create_sample_data(self): ) def test_no_event_in_period(self): - journeys_for({"user a": [{"event": "Step one", "timestamp": datetime(2021, 6, 6, 21)}]}, self.team) + journeys_for( + {"user a": [{"event": "Step one", "timestamp": datetime(2021, 6, 6, 21)}]}, + self.team, + ) filter = Filter( data={ @@ -86,7 +94,10 @@ def test_no_event_in_period(self): self.assertEqual(formatted_results[0]["days"][0], "2021-06-07") def test_only_one_user_reached_one_step(self): - journeys_for({"user a": 
[{"event": "step one", "timestamp": datetime(2021, 6, 7, 19)}]}, self.team) + journeys_for( + {"user a": [{"event": "step one", "timestamp": datetime(2021, 6, 7, 19)}]}, + self.team, + ) filter = Filter( data={ @@ -161,7 +172,8 @@ def test_only_one_user_reached_one_step(self): self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1) self.assertEqual( - [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user a"]] + [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], + [["user a"]], ) # No users converted 2021-06-07 @@ -691,7 +703,8 @@ def test_one_person_in_multiple_periods_and_windows(self): self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1) self.assertEqual( - [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_two"]] + [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], + [["user_two"]], ) # 1 user who converted starting # 2021-05-04 @@ -701,7 +714,8 @@ def test_one_person_in_multiple_periods_and_windows(self): self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1) self.assertEqual( - [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_one"]] + [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], + [["user_one"]], ) def test_from_second_step(self): @@ -889,7 +903,8 @@ def test_one_person_in_multiple_periods_and_windows_in_unordered_funnel(self): self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1) self.assertEqual( - [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_two"]] + [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], + [["user_two"]], ) # 1 user who converted starting # 2021-05-04 @@ -899,7 +914,8 @@ def 
test_one_person_in_multiple_periods_and_windows_in_unordered_funnel(self): self.assertEqual(len(funnel_trends_persons_existent_dropped_off_results), 1) self.assertEqual( - [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], [["user_one"]] + [person["distinct_ids"] for person in funnel_trends_persons_existent_dropped_off_results], + [["user_one"]], ) def test_one_person_in_multiple_periods_and_windows_in_strict_funnel(self): @@ -976,19 +992,55 @@ def test_funnel_step_breakdown_event(self): journeys_for( { "user_one": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$browser": "Chrome"}}, - {"event": "step two", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Chrome"}}, - {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Chrome"}}, + { + "event": "step one", + "timestamp": datetime(2021, 5, 1), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 3), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 5), + "properties": {"$browser": "Chrome"}, + }, ], "user_two": [ - {"event": "step one", "timestamp": datetime(2021, 5, 2), "properties": {"$browser": "Chrome"}}, - {"event": "step two", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Chrome"}}, - {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Chrome"}}, + { + "event": "step one", + "timestamp": datetime(2021, 5, 2), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 3), + "properties": {"$browser": "Chrome"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 5), + "properties": {"$browser": "Chrome"}, + }, ], "user_three": [ - {"event": "step one", "timestamp": datetime(2021, 5, 3), "properties": {"$browser": "Safari"}}, - {"event": "step two", "timestamp": datetime(2021, 
5, 4), "properties": {"$browser": "Safari"}}, - {"event": "step three", "timestamp": datetime(2021, 5, 5), "properties": {"$browser": "Safari"}}, + { + "event": "step one", + "timestamp": datetime(2021, 5, 3), + "properties": {"$browser": "Safari"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 4), + "properties": {"$browser": "Safari"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 5), + "properties": {"$browser": "Safari"}, + }, ], }, self.team, @@ -1018,16 +1070,40 @@ def test_funnel_step_breakdown_event(self): for res in result: if res["breakdown_value"] == ["Chrome"]: - self.assertEqual(res["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + res["data"], + [ + 100.0, + 100.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], + ) elif res["breakdown_value"] == ["Safari"]: - self.assertEqual(res["data"], [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + res["data"], + [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) else: self.fail(msg="Invalid breakdown value") def test_funnel_step_breakdown_person(self): _create_person(distinct_ids=["user_one"], team=self.team, properties={"$browser": "Chrome"}) _create_person(distinct_ids=["user_two"], team=self.team, properties={"$browser": "Chrome"}) - _create_person(distinct_ids=["user_three"], team=self.team, properties={"$browser": "Safari"}) + _create_person( + distinct_ids=["user_three"], + team=self.team, + properties={"$browser": "Safari"}, + ) journeys_for( { "user_one": [ @@ -1073,16 +1149,40 @@ def test_funnel_step_breakdown_person(self): for res in result: if res["breakdown_value"] == ["Chrome"]: - self.assertEqual(res["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + res["data"], + [ + 100.0, + 100.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ ], + ) elif res["breakdown_value"] == ["Safari"]: - self.assertEqual(res["data"], [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + res["data"], + [0.0, 0.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) else: self.fail(msg="Invalid breakdown value") def test_funnel_trend_cohort_breakdown(self): _create_person(distinct_ids=["user_one"], team=self.team, properties={"key": "value"}) _create_person(distinct_ids=["user_two"], team=self.team, properties={"key": "value"}) - _create_person(distinct_ids=["user_three"], team=self.team, properties={"$browser": "Safari"}) + _create_person( + distinct_ids=["user_three"], + team=self.team, + properties={"$browser": "Safari"}, + ) journeys_for( { @@ -1131,26 +1231,56 @@ def test_funnel_trend_cohort_breakdown(self): result = funnel_trends.run() self.assertEqual(len(result), 1) - self.assertEqual(result[0]["data"], [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + result[0]["data"], + [100.0, 100.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) @snapshot_clickhouse_queries def test_timezones_trends(self): journeys_for( { "user_one": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1, 10)}, # 04-30 in pacific - {"event": "step two", "timestamp": datetime(2021, 5, 1, 11)}, # today in pacific - {"event": "step three", "timestamp": datetime(2021, 5, 1, 12)}, # today in pacific + { + "event": "step one", + "timestamp": datetime(2021, 5, 1, 10), + }, # 04-30 in pacific + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 11), + }, # today in pacific + { + "event": "step three", + "timestamp": datetime(2021, 5, 1, 12), + }, # today in pacific ], "user_two": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1, 1)}, # 04-30 in pacific - {"event": "step two", "timestamp": datetime(2021, 5, 1, 2)}, # 04-30 in pacific - {"event": "step three", "timestamp": datetime(2021, 5, 1, 3)}, # 04-30 in pacific + { 
+ "event": "step one", + "timestamp": datetime(2021, 5, 1, 1), + }, # 04-30 in pacific + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 2), + }, # 04-30 in pacific + { + "event": "step three", + "timestamp": datetime(2021, 5, 1, 3), + }, # 04-30 in pacific ], "user_three": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1, 1)}, # 04-30 in pacific - {"event": "step two", "timestamp": datetime(2021, 5, 1, 10)}, # today in pacific - {"event": "step three", "timestamp": datetime(2021, 5, 1, 11)}, # today in pacific + { + "event": "step one", + "timestamp": datetime(2021, 5, 1, 1), + }, # 04-30 in pacific + { + "event": "step two", + "timestamp": datetime(2021, 5, 1, 10), + }, # today in pacific + { + "event": "step three", + "timestamp": datetime(2021, 5, 1, 11), + }, # today in pacific ], "user_eight": [], }, diff --git a/posthog/queries/funnels/test/test_funnel_trends_persons.py b/posthog/queries/funnels/test/test_funnel_trends_persons.py index ee75bfb025719..60ec3df37c3ff 100644 --- a/posthog/queries/funnels/test/test_funnel_trends_persons.py +++ b/posthog/queries/funnels/test/test_funnel_trends_persons.py @@ -3,8 +3,14 @@ from posthog.constants import INSIGHT_FUNNELS, FunnelVizType from posthog.models.filters import Filter from posthog.queries.funnels.funnel_trends_persons import ClickhouseFunnelTrendsActors -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for filter_data = { @@ -17,7 +23,11 @@ "funnel_from_step": 0, "entrance_period_start": "2021-05-01 00:00:00", "drop_off": False, - "events": [{"id": "step one", "order": 0}, {"id": "step two", "order": 
1}, {"id": "step three", "order": 2}], + "events": [ + {"id": "step one", "order": 0}, + {"id": "step two", "order": 1}, + {"id": "step three", "order": 2}, + ], "include_recordings": "true", } @@ -28,9 +38,21 @@ def test_funnel_trend_persons_returns_recordings(self): persons = journeys_for( { "user_one": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}}, - {"event": "step two", "timestamp": datetime(2021, 5, 2), "properties": {"$session_id": "s1b"}}, - {"event": "step three", "timestamp": datetime(2021, 5, 3), "properties": {"$session_id": "s1c"}}, + { + "event": "step one", + "timestamp": datetime(2021, 5, 1), + "properties": {"$session_id": "s1a"}, + }, + { + "event": "step two", + "timestamp": datetime(2021, 5, 2), + "properties": {"$session_id": "s1b"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 3), + "properties": {"$session_id": "s1c"}, + }, ] }, self.team, @@ -47,16 +69,31 @@ def test_funnel_trend_persons_returns_recordings(self): filter = Filter(data={"funnel_to_step": 1, **filter_data}) _, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors() self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid]) - self.assertEqual([person["matched_recordings"][0]["session_id"] for person in results], ["s1b"]) + self.assertEqual( + [person["matched_recordings"][0]["session_id"] for person in results], + ["s1b"], + ) @snapshot_clickhouse_queries def test_funnel_trend_persons_with_no_to_step(self): persons = journeys_for( { "user_one": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}}, - {"event": "step two", "timestamp": datetime(2021, 5, 2), "properties": {"$session_id": "s1b"}}, - {"event": "step three", "timestamp": datetime(2021, 5, 3), "properties": {"$session_id": "s1c"}}, + { + "event": "step one", + "timestamp": datetime(2021, 5, 1), + "properties": {"$session_id": "s1a"}, + }, + { + "event": "step 
two", + "timestamp": datetime(2021, 5, 2), + "properties": {"$session_id": "s1b"}, + }, + { + "event": "step three", + "timestamp": datetime(2021, 5, 3), + "properties": {"$session_id": "s1c"}, + }, ] }, self.team, @@ -74,14 +111,21 @@ def test_funnel_trend_persons_with_no_to_step(self): filter = Filter(data=filter_data) _, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors() self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid]) - self.assertEqual([person["matched_recordings"][0]["session_id"] for person in results], ["s1c"]) + self.assertEqual( + [person["matched_recordings"][0]["session_id"] for person in results], + ["s1c"], + ) @snapshot_clickhouse_queries def test_funnel_trend_persons_with_drop_off(self): persons = journeys_for( { "user_one": [ - {"event": "step one", "timestamp": datetime(2021, 5, 1), "properties": {"$session_id": "s1a"}} + { + "event": "step one", + "timestamp": datetime(2021, 5, 1), + "properties": {"$session_id": "s1a"}, + } ] }, self.team, @@ -98,4 +142,7 @@ def test_funnel_trend_persons_with_drop_off(self): filter = Filter(data={**filter_data, "drop_off": True}) _, results, _ = ClickhouseFunnelTrendsActors(filter, self.team).get_actors() self.assertEqual([person["id"] for person in results], [persons["user_one"].uuid]) - self.assertEqual([person["matched_recordings"][0].get("session_id") for person in results], ["s1a"]) + self.assertEqual( + [person["matched_recordings"][0].get("session_id") for person in results], + ["s1a"], + ) diff --git a/posthog/queries/funnels/test/test_funnel_unordered.py b/posthog/queries/funnels/test/test_funnel_unordered.py index cb4eaba04776f..ce3643d007fc6 100644 --- a/posthog/queries/funnels/test/test_funnel_unordered.py +++ b/posthog/queries/funnels/test/test_funnel_unordered.py @@ -7,13 +7,17 @@ from posthog.models.action_step import ActionStep from posthog.models.filters import Filter from posthog.queries.funnels.funnel_unordered import 
ClickhouseFunnelUnordered -from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors +from posthog.queries.funnels.funnel_unordered_persons import ( + ClickhouseFunnelUnorderedActors, +) from posthog.queries.funnels.test.breakdown_cases import ( FunnelStepResult, assert_funnel_results_equal, funnel_breakdown_test_factory, ) -from posthog.queries.funnels.test.conversion_time_cases import funnel_conversion_time_test_factory +from posthog.queries.funnels.test.conversion_time_cases import ( + funnel_conversion_time_test_factory, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -35,7 +39,16 @@ def _create_action(**kwargs): return action -class TestFunnelUnorderedStepsBreakdown(ClickhouseTestMixin, funnel_breakdown_test_factory(ClickhouseFunnelUnordered, ClickhouseFunnelUnorderedActors, _create_event, _create_action, _create_person)): # type: ignore +class TestFunnelUnorderedStepsBreakdown( + ClickhouseTestMixin, + funnel_breakdown_test_factory( # type: ignore + ClickhouseFunnelUnordered, + ClickhouseFunnelUnorderedActors, + _create_event, + _create_action, + _create_person, + ), +): maxDiff = None def test_funnel_step_breakdown_event_single_person_events_with_multiple_properties(self): @@ -170,21 +183,41 @@ def test_funnel_step_breakdown_with_step_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": 
datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -217,21 +250,41 @@ def test_funnel_step_breakdown_with_step_one_attribution(self): # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ {"event": "sign up", "timestamp": datetime(2020, 1, 1, 13)}, - {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$browser": "Safari"}, + }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)}, ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - 
{"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -261,7 +314,8 @@ def test_funnel_step_breakdown_with_step_one_attribution(self): [people["person1"].uuid, people["person2"].uuid, people["person3"].uuid], ) self.assertCountEqual( - self._get_actor_ids_at_step(filter, 2, ""), [people["person1"].uuid, people["person3"].uuid] + self._get_actor_ids_at_step(filter, 2, ""), + [people["person1"].uuid, people["person3"].uuid], ) self._assert_funnel_breakdown_result_is_correct( @@ -296,7 +350,11 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ @@ -304,13 +362,25 @@ def test_funnel_step_breakdown_with_step_one_attribution_incomplete_funnel(self) # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}} ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)} ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + 
"timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -396,7 +466,11 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu # event events_by_person = { "person1": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 1, 12), + "properties": {"$browser": "Chrome"}, + }, {"event": "buy", "timestamp": datetime(2020, 1, 1, 13)}, ], "person2": [ @@ -404,13 +478,25 @@ def test_funnel_step_non_array_breakdown_with_step_one_attribution_incomplete_fu # {"event": "buy", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$browser": "Safari"}} ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, # {"event": "buy", "timestamp": datetime(2020, 1, 2, 15)} ], "person4": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 15), "properties": {"$browser": 0}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 15), + "properties": {"$browser": 0}, + }, # step attribution means alakazam is valid when step = 1 - {"event": "buy", "timestamp": datetime(2020, 1, 2, 16), "properties": {"$browser": "alakazam"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 2, 16), + "properties": {"$browser": "alakazam"}, + }, ], } people = journeys_for(events_by_person, self.team) @@ -482,7 +568,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): filters = { "events": [ {"id": "sign up", "order": 0}, - {"id": "buy", "properties": [{"type": "event", "key": "$version", "value": "xyz"}], "order": 1}, + { + "id": "buy", + "properties": [{"type": "event", "key": "$version", "value": "xyz"}], + "order": 1, + }, ], "insight": INSIGHT_FUNNELS, "date_from": 
"2020-01-01", @@ -506,7 +596,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): "timestamp": datetime(2020, 1, 1, 12), "properties": {"$browser": "Chrome", "$version": "xyz"}, }, - {"event": "buy", "timestamp": datetime(2020, 1, 1, 13), "properties": {"$browser": "Chrome"}}, + { + "event": "buy", + "timestamp": datetime(2020, 1, 1, 13), + "properties": {"$browser": "Chrome"}, + }, # discarded because doesn't meet criteria ], "person2": [ @@ -518,7 +612,11 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): }, ], "person3": [ - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 14), "properties": {"$browser": "Mac"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 14), + "properties": {"$browser": "Mac"}, + }, { "event": "buy", "timestamp": datetime(2020, 1, 2, 15), @@ -541,7 +639,15 @@ def test_funnel_breakdown_correct_breakdown_props_are_chosen_for_step(self): self.assertCountEqual([res[0]["breakdown"] for res in result], [[""], ["Mac"], ["Safari"]]) -class TestFunnelUnorderedStepsConversionTime(ClickhouseTestMixin, funnel_conversion_time_test_factory(ClickhouseFunnelUnordered, ClickhouseFunnelUnorderedActors, _create_event, _create_person)): # type: ignore +class TestFunnelUnorderedStepsConversionTime( + ClickhouseTestMixin, + funnel_conversion_time_test_factory( # type: ignore + ClickhouseFunnelUnordered, + ClickhouseFunnelUnorderedActors, + _create_event, + _create_person, + ), +): maxDiff = None pass @@ -574,43 +680,79 @@ def test_basic_unordered_funnel(self): distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_pageview1") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_pageview1") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_pageview1", + ) person3_stopped_after_insight_view = _create_person( 
distinct_ids=["stopped_after_insightview"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview", + ) person4_stopped_after_insight_view_reverse_order = _create_person( distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk ) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview2", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview2") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview2", + ) person5_stopped_after_insight_view_random = _create_person( distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview3", + ) _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="insight viewed", + 
distinct_id="stopped_after_insightview3", + ) person6_did_only_insight_view = _create_person( distinct_ids=["stopped_after_insightview4"], team_id=self.team.pk ) _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview4") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview4") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview4", + ) person7_did_only_pageview = _create_person(distinct_ids=["stopped_after_insightview5"], team_id=self.team.pk) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview5") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview5") person8_didnot_signup = _create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview6", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6") result = funnel.run() @@ -649,7 +791,11 @@ def test_basic_unordered_funnel(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, -2), - [person1_stopped_after_signup.uuid, person6_did_only_insight_view.uuid, person7_did_only_pageview.uuid], + [ + person1_stopped_after_signup.uuid, + person6_did_only_insight_view.uuid, + person7_did_only_pageview.uuid, + ], ) self.assertCountEqual( @@ -691,38 +837,70 @@ def test_big_multi_step_unordered_funnel(self): person3_stopped_after_insight_view = _create_person( distinct_ids=["stopped_after_insightview"], team_id=self.team.pk ) - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview", + ) _create_event(team=self.team, event="$pageview", 
distinct_id="stopped_after_insightview") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview", + ) person4_stopped_after_insight_view_reverse_order = _create_person( distinct_ids=["stopped_after_insightview2"], team_id=self.team.pk ) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview2", + ) _create_event(team=self.team, event="crying", distinct_id="stopped_after_insightview2") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview2") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview2", + ) person5_stopped_after_insight_view_random = _create_person( distinct_ids=["stopped_after_insightview3"], team_id=self.team.pk ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="user signed up", distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="user signed up", + distinct_id="stopped_after_insightview3", + ) _create_event(team=self.team, event="crying", distinct_id="stopped_after_insightview3") - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview3") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview3", + ) person6_did_only_insight_view = _create_person( distinct_ids=["stopped_after_insightview4"], team_id=self.team.pk ) _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview4") - _create_event(team=self.team, event="insight viewed", 
distinct_id="stopped_after_insightview4") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview4", + ) person7_did_only_pageview = _create_person(distinct_ids=["stopped_after_insightview5"], team_id=self.team.pk) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview5") _create_event(team=self.team, event="blaah blaa", distinct_id="stopped_after_insightview5") person8_didnot_signup = _create_person(distinct_ids=["stopped_after_insightview6"], team_id=self.team.pk) - _create_event(team=self.team, event="insight viewed", distinct_id="stopped_after_insightview6") + _create_event( + team=self.team, + event="insight viewed", + distinct_id="stopped_after_insightview6", + ) _create_event(team=self.team, event="$pageview", distinct_id="stopped_after_insightview6") funnel = ClickhouseFunnelUnordered(filter, self.team) @@ -771,7 +949,10 @@ def test_big_multi_step_unordered_funnel(self): ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 4), [person5_stopped_after_insight_view_random.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 4), + [person5_stopped_after_insight_view_random.uuid], + ) def test_basic_unordered_funnel_conversion_times(self): filter = Filter( @@ -792,14 +973,20 @@ def test_basic_unordered_funnel_conversion_times(self): person1_stopped_after_signup = _create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00" + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + timestamp="2021-05-02 00:00:00", ) person2_stopped_after_one_pageview = _create_person( distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 00:00:00" + team=self.team, + event="$pageview", + 
distinct_id="stopped_after_pageview1", + timestamp="2021-05-02 00:00:00", ) _create_event( team=self.team, @@ -824,11 +1011,17 @@ def test_basic_unordered_funnel_conversion_times(self): timestamp="2021-05-02 02:00:00", ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-02 04:00:00" + team=self.team, + event="$pageview", + distinct_id="stopped_after_insightview", + timestamp="2021-05-02 04:00:00", ) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_insightview", timestamp="2021-05-03 00:00:00" + team=self.team, + event="$pageview", + distinct_id="stopped_after_insightview", + timestamp="2021-05-03 00:00:00", ) _create_event( team=self.team, @@ -870,10 +1063,16 @@ def test_basic_unordered_funnel_conversion_times(self): self.assertCountEqual( self._get_actor_ids_at_step(filter, 2), - [person2_stopped_after_one_pageview.uuid, person3_stopped_after_insight_view.uuid], + [ + person2_stopped_after_one_pageview.uuid, + person3_stopped_after_insight_view.uuid, + ], ) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 3), [person3_stopped_after_insight_view.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 3), + [person3_stopped_after_insight_view.uuid], + ) def test_single_event_unordered_funnel(self): filter = Filter( @@ -889,12 +1088,18 @@ def test_single_event_unordered_funnel(self): _create_person(distinct_ids=["stopped_after_signup1"], team_id=self.team.pk) _create_event( - team=self.team, event="user signed up", distinct_id="stopped_after_signup1", timestamp="2021-05-02 00:00:00" + team=self.team, + event="user signed up", + distinct_id="stopped_after_signup1", + timestamp="2021-05-02 00:00:00", ) _create_person(distinct_ids=["stopped_after_pageview1"], team_id=self.team.pk) _create_event( - team=self.team, event="$pageview", distinct_id="stopped_after_pageview1", timestamp="2021-05-02 00:00:00" + team=self.team, + event="$pageview", + 
distinct_id="stopped_after_pageview1", + timestamp="2021-05-02 00:00:00", ) _create_event( team=self.team, @@ -917,14 +1122,30 @@ def test_funnel_exclusions_invalid_params(self): ], "insight": INSIGHT_FUNNELS, "funnel_window_days": 14, - "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 1, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 1, + "funnel_to_step": 1, + } + ], } filter = Filter(data=filters) self.assertRaises(ValidationError, lambda: ClickhouseFunnelUnordered(filter, self.team).run()) # partial windows not allowed for unordered filter = filter.shallow_clone( - {"exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}]} + { + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } + ] + } ) self.assertRaises(ValidationError, lambda: ClickhouseFunnelUnordered(filter, self.team).run()) @@ -938,26 +1159,68 @@ def test_funnel_exclusions_full_window(self): "funnel_window_days": 14, "date_from": "2021-05-01 00:00:00", "date_to": "2021-05-14 00:00:00", - "exclusions": [{"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 1}], + "exclusions": [ + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 1, + } + ], } filter = Filter(data=filters) funnel = ClickhouseFunnelUnordered(filter, self.team) # event 1 person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="paid", distinct_id="person1", timestamp="2021-05-01 02:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", + ) # event 2 person2 = 
_create_person(distinct_ids=["person2"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 03:00:00") - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 03:30:00") - _create_event(team=self.team, event="paid", distinct_id="person2", timestamp="2021-05-01 04:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 03:30:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) # event 3 person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="paid", distinct_id="person3", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="paid", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", + ) result = funnel.run() @@ -967,7 +1230,10 @@ def test_funnel_exclusions_full_window(self): self.assertEqual(result[1]["name"], "Completed 2 steps") self.assertEqual(result[1]["count"], 2) - self.assertCountEqual(self._get_actor_ids_at_step(filter, 1), [person1.uuid, person2.uuid, person3.uuid]) + self.assertCountEqual( + self._get_actor_ids_at_step(filter, 1), + [person1.uuid, person2.uuid, person3.uuid], + ) self.assertCountEqual(self._get_actor_ids_at_step(filter, 2), [person1.uuid, person3.uuid]) def test_advanced_funnel_multiple_exclusions_between_steps(self): @@ -983,56 +1249,246 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): "date_to": "2021-05-14 00:00:00", "insight": INSIGHT_FUNNELS, 
"exclusions": [ - {"id": "x", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4}, - {"id": "y", "type": "events", "funnel_from_step": 0, "funnel_to_step": 4}, + { + "id": "x", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 4, + }, + { + "id": "y", + "type": "events", + "funnel_from_step": 0, + "funnel_to_step": 4, + }, ], } person1 = _create_person(distinct_ids=["person1"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person1", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="x", distinct_id="person1", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="$pageview", distinct_id="person1", timestamp="2021-05-01 03:00:00") - _create_event(team=self.team, event="insight viewed", distinct_id="person1", timestamp="2021-05-01 04:00:00") - _create_event(team=self.team, event="y", distinct_id="person1", timestamp="2021-05-01 04:30:00") - _create_event(team=self.team, event="invite teammate", distinct_id="person1", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="pageview2", distinct_id="person1", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person1", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person1", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person1", + timestamp="2021-05-01 03:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person1", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person1", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person1", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="pageview2", + distinct_id="person1", + 
timestamp="2021-05-01 06:00:00", + ) person2 = _create_person(distinct_ids=["person2"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person2", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person2", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="insight viewed", distinct_id="person2", timestamp="2021-05-01 04:00:00") - _create_event(team=self.team, event="y", distinct_id="person2", timestamp="2021-05-01 04:30:00") - _create_event(team=self.team, event="invite teammate", distinct_id="person2", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="x", distinct_id="person2", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person2", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person2", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person2", + timestamp="2021-05-01 01:30:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person2", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person2", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person2", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person2", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person2", + timestamp="2021-05-01 05:30:00", + ) + _create_event( + team=self.team, + event="pageview2", + distinct_id="person2", + timestamp="2021-05-01 06:00:00", + ) person3 = _create_person(distinct_ids=["person3"], team_id=self.team.pk) - 
_create_event(team=self.team, event="user signed up", distinct_id="person3", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person3", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="insight viewed", distinct_id="person3", timestamp="2021-05-01 04:00:00") - _create_event(team=self.team, event="invite teammate", distinct_id="person3", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="x", distinct_id="person3", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person3", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person3", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 01:30:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person3", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person3", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person3", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person3", + timestamp="2021-05-01 05:30:00", + ) + _create_event( + team=self.team, + event="pageview2", + distinct_id="person3", + timestamp="2021-05-01 06:00:00", + ) person4 = _create_person(distinct_ids=["person4"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person4", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="$pageview", distinct_id="person4", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="insight viewed", distinct_id="person4", timestamp="2021-05-01 04:00:00") - 
_create_event(team=self.team, event="invite teammate", distinct_id="person4", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="pageview2", distinct_id="person4", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person4", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person4", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person4", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person4", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="pageview2", + distinct_id="person4", + timestamp="2021-05-01 06:00:00", + ) person5 = _create_person(distinct_ids=["person5"], team_id=self.team.pk) - _create_event(team=self.team, event="user signed up", distinct_id="person5", timestamp="2021-05-01 01:00:00") - _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 01:30:00") - _create_event(team=self.team, event="$pageview", distinct_id="person5", timestamp="2021-05-01 02:00:00") - _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 02:30:00") - _create_event(team=self.team, event="insight viewed", distinct_id="person5", timestamp="2021-05-01 04:00:00") - _create_event(team=self.team, event="y", distinct_id="person5", timestamp="2021-05-01 04:30:00") - _create_event(team=self.team, event="invite teammate", distinct_id="person5", timestamp="2021-05-01 05:00:00") - _create_event(team=self.team, event="x", distinct_id="person5", timestamp="2021-05-01 05:30:00") - _create_event(team=self.team, event="pageview2", distinct_id="person5", timestamp="2021-05-01 06:00:00") + _create_event( + team=self.team, + event="user signed up", + distinct_id="person5", + timestamp="2021-05-01 01:00:00", + ) + _create_event( + 
team=self.team, + event="x", + distinct_id="person5", + timestamp="2021-05-01 01:30:00", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="person5", + timestamp="2021-05-01 02:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person5", + timestamp="2021-05-01 02:30:00", + ) + _create_event( + team=self.team, + event="insight viewed", + distinct_id="person5", + timestamp="2021-05-01 04:00:00", + ) + _create_event( + team=self.team, + event="y", + distinct_id="person5", + timestamp="2021-05-01 04:30:00", + ) + _create_event( + team=self.team, + event="invite teammate", + distinct_id="person5", + timestamp="2021-05-01 05:00:00", + ) + _create_event( + team=self.team, + event="x", + distinct_id="person5", + timestamp="2021-05-01 05:30:00", + ) + _create_event( + team=self.team, + event="pageview2", + distinct_id="person5", + timestamp="2021-05-01 06:00:00", + ) filter = Filter(data=filters) funnel = ClickhouseFunnelUnordered(filter, self.team) @@ -1058,18 +1514,36 @@ def test_advanced_funnel_multiple_exclusions_between_steps(self): def test_funnel_unordered_all_events_with_properties(self): _create_person(distinct_ids=["user"], team=self.team) _create_event(event="user signed up", distinct_id="user", team=self.team) - _create_event(event="added to card", distinct_id="user", properties={"is_saved": True}, team=self.team) + _create_event( + event="added to card", + distinct_id="user", + properties={"is_saved": True}, + team=self.team, + ) filters = { "events": [ - {"type": "events", "id": "user signed up", "order": 0, "name": "user signed up", "math": "total"}, + { + "type": "events", + "id": "user signed up", + "order": 0, + "name": "user signed up", + "math": "total", + }, { "type": "events", "id": None, "order": 1, "name": "All events", "math": "total", - "properties": [{"key": "is_saved", "value": ["true"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "is_saved", + "value": ["true"], + 
"operator": "exact", + "type": "event", + } + ], }, ], "funnel_window_days": 14, @@ -1084,9 +1558,17 @@ def test_funnel_unordered_all_events_with_properties(self): def test_funnel_unordered_entity_filters(self): _create_person(distinct_ids=["user"], team=self.team) - _create_event(event="user signed up", distinct_id="user", properties={"prop_a": "some value"}, team=self.team) _create_event( - event="user signed up", distinct_id="user", properties={"prop_b": "another value"}, team=self.team + event="user signed up", + distinct_id="user", + properties={"prop_a": "some value"}, + team=self.team, + ) + _create_event( + event="user signed up", + distinct_id="user", + properties={"prop_b": "another value"}, + team=self.team, ) filters = { @@ -1097,7 +1579,14 @@ def test_funnel_unordered_entity_filters(self): "order": 0, "name": "user signed up", "math": "total", - "properties": [{"key": "prop_a", "value": ["some value"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "prop_a", + "value": ["some value"], + "operator": "exact", + "type": "event", + } + ], }, { "type": "events", @@ -1105,7 +1594,14 @@ def test_funnel_unordered_entity_filters(self): "order": 1, "name": "user signed up", "math": "total", - "properties": [{"key": "prop_b", "value": "another", "operator": "icontains", "type": "event"}], + "properties": [ + { + "key": "prop_b", + "value": "another", + "operator": "icontains", + "type": "event", + } + ], }, ], } diff --git a/posthog/queries/funnels/test/test_funnel_unordered_persons.py b/posthog/queries/funnels/test/test_funnel_unordered_persons.py index 673dee6d30826..c00e6975f5044 100644 --- a/posthog/queries/funnels/test/test_funnel_unordered_persons.py +++ b/posthog/queries/funnels/test/test_funnel_unordered_persons.py @@ -6,8 +6,12 @@ from posthog.constants import INSIGHT_FUNNELS from posthog.models.filters import Filter -from posthog.queries.funnels.funnel_unordered_persons import ClickhouseFunnelUnorderedActors -from 
posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.queries.funnels.funnel_unordered_persons import ( + ClickhouseFunnelUnorderedActors, +) +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, diff --git a/posthog/queries/funnels/test/test_utils.py b/posthog/queries/funnels/test/test_utils.py index 8a71c02e6f6e5..c45a4eddcb518 100644 --- a/posthog/queries/funnels/test/test_utils.py +++ b/posthog/queries/funnels/test/test_utils.py @@ -1,6 +1,10 @@ from posthog.constants import FunnelOrderType from posthog.models.filters import Filter -from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelStrict, ClickhouseFunnelUnordered +from posthog.queries.funnels import ( + ClickhouseFunnel, + ClickhouseFunnelStrict, + ClickhouseFunnelUnordered, +) from posthog.queries.funnels.utils import get_funnel_order_class from posthog.test.base import BaseTest diff --git a/posthog/queries/funnels/utils.py b/posthog/queries/funnels/utils.py index 33ef56e271bd2..68f93c2d4542e 100644 --- a/posthog/queries/funnels/utils.py +++ b/posthog/queries/funnels/utils.py @@ -6,7 +6,11 @@ def get_funnel_order_class(filter: Filter) -> Type[ClickhouseFunnelBase]: - from posthog.queries.funnels import ClickhouseFunnel, ClickhouseFunnelStrict, ClickhouseFunnelUnordered + from posthog.queries.funnels import ( + ClickhouseFunnel, + ClickhouseFunnelStrict, + ClickhouseFunnelUnordered, + ) if filter.funnel_order_type == FunnelOrderType.UNORDERED: return ClickhouseFunnelUnordered diff --git a/posthog/queries/insight.py b/posthog/queries/insight.py index 294eb012d60b0..5992d16ddf1e9 100644 --- a/posthog/queries/insight.py +++ b/posthog/queries/insight.py @@ -7,7 +7,13 @@ # Wrapper around sync_execute, adding query tags for insights performance def insight_sync_execute( - query, args=None, *, team_id: int, query_type: str, filter: 
Optional["FilterType"] = None, **kwargs + query, + args=None, + *, + team_id: int, + query_type: str, + filter: Optional["FilterType"] = None, + **kwargs, ): tag_queries(team_id=team_id) _tag_query(query, query_type, filter) diff --git a/posthog/queries/paths/paths.py b/posthog/queries/paths/paths.py index b829d8487dddf..6a98857e3927d 100644 --- a/posthog/queries/paths/paths.py +++ b/posthog/queries/paths/paths.py @@ -104,7 +104,6 @@ def _exec_query(self) -> List[Tuple]: ) def get_query(self) -> str: - path_query = self.get_path_query() funnel_cte = "" @@ -198,7 +197,6 @@ def should_query_funnel(self) -> bool: return False def get_path_query(self) -> str: - paths_per_person_query = self.get_paths_per_person_query() self.params["edge_limit"] = self._filter.edge_limit @@ -243,7 +241,10 @@ def get_session_threshold_clause(self) -> str: # Implemented in /ee def get_target_clause(self) -> Tuple[str, Dict]: - params: Dict[str, Union[str, None]] = {"target_point": None, "secondary_target_point": None} + params: Dict[str, Union[str, None]] = { + "target_point": None, + "secondary_target_point": None, + } filtered_path_ordering_clause = self.get_filtered_path_ordering() compacting_function = self.get_array_compacting_function() diff --git a/posthog/queries/paths/paths_event_query.py b/posthog/queries/paths/paths_event_query.py index 6cc96243cc034..913307b7fdc21 100644 --- a/posthog/queries/paths/paths_event_query.py +++ b/posthog/queries/paths/paths_event_query.py @@ -22,7 +22,6 @@ class PathEventQuery(EventQuery): _filter: PathFilter def get_query(self) -> Tuple[str, Dict[str, Any]]: - funnel_paths_timestamp = "" funnel_paths_join = "" funnel_paths_filter = "" @@ -55,7 +54,13 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: ] _fields += [f"{self.EVENT_TABLE_ALIAS}.{field} AS {field}" for field in self._extra_fields] _fields += [ - get_property_string_expr("events", field, f"'{field}'", "properties", table_alias=self.EVENT_TABLE_ALIAS)[0] + get_property_string_expr( + 
"events", + field, + f"'{field}'", + "properties", + table_alias=self.EVENT_TABLE_ALIAS, + )[0] + f" as {field}" for field in self._extra_event_properties ] diff --git a/posthog/queries/person_distinct_id_query.py b/posthog/queries/person_distinct_id_query.py index 9e8d0606337bb..c04711fbe2370 100644 --- a/posthog/queries/person_distinct_id_query.py +++ b/posthog/queries/person_distinct_id_query.py @@ -2,7 +2,6 @@ def get_team_distinct_ids_query(team_id: int) -> str: - # ensure team_id is actually an int so we can safely interpolate into the query assert isinstance(team_id, int) diff --git a/posthog/queries/person_query.py b/posthog/queries/person_query.py index 43b86d79ec256..73a779e5aca6a 100644 --- a/posthog/queries/person_query.py +++ b/posthog/queries/person_query.py @@ -5,8 +5,14 @@ from posthog.constants import PropertyOperatorType from posthog.models import Filter from posthog.models.cohort import Cohort -from posthog.models.cohort.sql import GET_COHORTPEOPLE_BY_COHORT_ID, GET_STATIC_COHORTPEOPLE_BY_COHORT_ID -from posthog.models.cohort.util import format_precalculated_cohort_query, format_static_cohort_query +from posthog.models.cohort.sql import ( + GET_COHORTPEOPLE_BY_COHORT_ID, + GET_STATIC_COHORTPEOPLE_BY_COHORT_ID, +) +from posthog.models.cohort.util import ( + format_precalculated_cohort_query, + format_static_cohort_query, +) from posthog.models.entity import Entity from posthog.models.filters.path_filter import PathFilter from posthog.models.filters.retention_filter import RetentionFilter @@ -71,7 +77,8 @@ def __init__( self._extra_fields = self._extra_fields - {self.PERSON_PROPERTIES_ALIAS} | {"properties"} properties = self._filter.property_groups.combine_property_group( - PropertyOperatorType.AND, self._entity.property_groups if self._entity else None + PropertyOperatorType.AND, + self._entity.property_groups if self._entity else None, ) self._inner_person_properties = self._column_optimizer.property_optimizer.parse_property_groups( @@ -79,7 
+86,10 @@ def __init__( ).inner def get_query( - self, prepend: Optional[Union[str, int]] = None, paginate: bool = False, filter_future_persons: bool = False + self, + prepend: Optional[Union[str, int]] = None, + paginate: bool = False, + filter_future_persons: bool = False, ) -> Tuple[str, Dict]: prepend = str(prepend) if prepend is not None else "" @@ -92,7 +102,10 @@ def get_query( person_filters_finalization_condition, person_filters_params, ) = self._get_person_filter_clauses(prepend=prepend) - multiple_cohorts_condition, multiple_cohorts_params = self._get_multiple_cohorts_clause(prepend=prepend) + ( + multiple_cohorts_condition, + multiple_cohorts_params, + ) = self._get_multiple_cohorts_clause(prepend=prepend) single_cohort_join, single_cohort_params = self._get_fast_single_cohort_clause() if paginate: order = "ORDER BY argMax(person.created_at, version) DESC, id DESC" if paginate else "" @@ -100,9 +113,11 @@ def get_query( else: order = "" limit_offset, limit_params = "", {} - search_prefiltering_condition, search_finalization_condition, search_params = self._get_search_clauses( - prepend=prepend - ) + ( + search_prefiltering_condition, + search_finalization_condition, + search_params, + ) = self._get_search_clauses(prepend=prepend) distinct_id_condition, distinct_id_params = self._get_distinct_id_clause() email_condition, email_params = self._get_email_clause() filter_future_persons_condition = ( @@ -228,7 +243,11 @@ def _get_fast_single_cohort_clause(self) -> Tuple[str, Dict]: ) {self.COHORT_TABLE_ALIAS} ON {self.COHORT_TABLE_ALIAS}.person_id = person.id """, - {"team_id": self._team_id, "cohort_id": self._cohort.pk, "version": self._cohort.version}, + { + "team_id": self._team_id, + "cohort_id": self._cohort.pk, + "version": self._cohort.version, + }, ) else: return "", {} @@ -301,7 +320,14 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]: prop_group = PropertyGroup( type=PropertyOperatorType.AND, - 
values=[Property(key="email", operator="icontains", value=self._filter.search, type="person")], + values=[ + Property( + key="email", + operator="icontains", + value=self._filter.search, + type="person", + ) + ], ) finalization_conditions_sql, params = parse_prop_grouped_clauses( team_id=self._team_id, @@ -315,7 +341,7 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]: ) finalization_sql = f"AND ({finalization_conditions_sql} OR {id_conditions_sql})" - prefiltering_conditions_sql, prefiltering_params = parse_prop_grouped_clauses( + (prefiltering_conditions_sql, prefiltering_params,) = parse_prop_grouped_clauses( team_id=self._team_id, property_group=prop_group, prepend=f"search_pre_{prepend}", @@ -360,7 +386,8 @@ def _add_distinct_id_join_if_needed(self, query: str, params: Dict[Any, Any]) -> GROUP BY person.* ORDER BY created_at desc, id desc """.format( - person_query=query, distinct_id_query=get_team_distinct_ids_query(self._team_id) + person_query=query, + distinct_id_query=get_team_distinct_ids_query(self._team_id), ), params, ) @@ -371,7 +398,9 @@ def _get_email_clause(self) -> Tuple[str, Dict]: if self._filter.email: return prop_filter_json_extract( - Property(key="email", value=self._filter.email, type="person"), 0, prepend="_email" + Property(key="email", value=self._filter.email, type="person"), + 0, + prepend="_email", ) return "", {} diff --git a/posthog/queries/properties_timeline/properties_timeline.py b/posthog/queries/properties_timeline/properties_timeline.py index 328c0da8fa03c..578a9dee85620 100644 --- a/posthog/queries/properties_timeline/properties_timeline.py +++ b/posthog/queries/properties_timeline/properties_timeline.py @@ -5,7 +5,10 @@ from posthog.models.filters.properties_timeline_filter import PropertiesTimelineFilter from posthog.models.group.group import Group from posthog.models.person.person import Person -from posthog.models.property.util import extract_tables_and_properties, 
get_single_or_multi_property_string_expr +from posthog.models.property.util import ( + extract_tables_and_properties, + get_single_or_multi_property_string_expr, +) from posthog.models.team.team import Team from posthog.queries.insight import insight_sync_execute from posthog.queries.trends.util import offset_time_series_date_by_interval @@ -87,7 +90,9 @@ def run( filter = filter.shallow_clone( { "date_to": offset_time_series_date_by_interval( - cast(datetime.datetime, filter.date_from), filter=filter, team=team + cast(datetime.datetime, filter.date_from), + filter=filter, + team=team, ) } ) @@ -120,7 +125,10 @@ def run( actor_properties_column=actor_properties_column, ) - params = {**event_query_params, "actor_id": actor.uuid if isinstance(actor, Person) else actor.group_key} + params = { + **event_query_params, + "actor_id": actor.uuid if isinstance(actor, Person) else actor.group_key, + } raw_query_result = insight_sync_execute( formatted_sql, {**params, **filter.hogql_context.values}, diff --git a/posthog/queries/property_values.py b/posthog/queries/property_values.py index d5d37a076e869..a8b943f25d1d2 100644 --- a/posthog/queries/property_values.py +++ b/posthog/queries/property_values.py @@ -3,7 +3,10 @@ from django.utils import timezone from posthog.models.event.sql import SELECT_PROP_VALUES_SQL_WITH_FILTER -from posthog.models.person.sql import SELECT_PERSON_PROP_VALUES_SQL, SELECT_PERSON_PROP_VALUES_SQL_WITH_FILTER +from posthog.models.person.sql import ( + SELECT_PERSON_PROP_VALUES_SQL, + SELECT_PERSON_PROP_VALUES_SQL_WITH_FILTER, +) from posthog.models.property.util import get_property_string_expr from posthog.models.team import Team from posthog.queries.insight import insight_sync_execute @@ -11,7 +14,10 @@ def get_property_values_for_key( - key: str, team: Team, event_names: Optional[List[str]] = None, value: Optional[str] = None + key: str, + team: Team, + event_names: Optional[List[str]] = None, + value: Optional[str] = None, ): property_field, 
mat_column_exists = get_property_string_expr("events", key, "%(key)s", "properties") parsed_date_from = "AND timestamp >= '{}'".format( diff --git a/posthog/queries/query_date_range.py b/posthog/queries/query_date_range.py index 208bf0207843d..e0604c3b44b91 100644 --- a/posthog/queries/query_date_range.py +++ b/posthog/queries/query_date_range.py @@ -10,8 +10,16 @@ from posthog.models.filters.mixins.interval import IntervalMixin from posthog.models.team import Team -from posthog.queries.util import TIME_IN_SECONDS, get_earliest_timestamp, get_start_of_interval_sql -from posthog.utils import DEFAULT_DATE_FROM_DAYS, relative_date_parse, relative_date_parse_with_delta_mapping +from posthog.queries.util import ( + TIME_IN_SECONDS, + get_earliest_timestamp, + get_start_of_interval_sql, +) +from posthog.utils import ( + DEFAULT_DATE_FROM_DAYS, + relative_date_parse, + relative_date_parse_with_delta_mapping, +) class QueryDateRange: @@ -28,7 +36,13 @@ class QueryDateRange: _table: str _should_round: Optional[bool] - def __init__(self, filter: AnyFilter, team: Team, should_round: Optional[bool] = None, table="") -> None: + def __init__( + self, + filter: AnyFilter, + team: Team, + should_round: Optional[bool] = None, + table="", + ) -> None: filter.team = team # This is a dirty - but the easiest - way to get the team into the filter self._filter = filter self._team = team @@ -97,7 +111,10 @@ def date_to(self) -> Tuple[str, Dict]: date_to_query = self.date_to_clause date_to = self.date_to_param - date_to_param = {"date_to": date_to.strftime("%Y-%m-%d %H:%M:%S"), "timezone": self._team.timezone} + date_to_param = { + "date_to": date_to.strftime("%Y-%m-%d %H:%M:%S"), + "timezone": self._team.timezone, + } return date_to_query, date_to_param @@ -106,7 +123,10 @@ def date_from(self) -> Tuple[str, Dict]: date_from_query = self.date_from_clause date_from = self.date_from_param - date_from_param = {"date_from": date_from.strftime("%Y-%m-%d %H:%M:%S"), "timezone": 
self._team.timezone} + date_from_param = { + "date_from": date_from.strftime("%Y-%m-%d %H:%M:%S"), + "timezone": self._team.timezone, + } return date_from_query, date_from_param diff --git a/posthog/queries/retention/__init__.py b/posthog/queries/retention/__init__.py index f0817c016bb5c..c3d1590058eea 100644 --- a/posthog/queries/retention/__init__.py +++ b/posthog/queries/retention/__init__.py @@ -1,7 +1,9 @@ from posthog.settings import EE_AVAILABLE if EE_AVAILABLE: - from ee.clickhouse.queries.retention.retention import ClickhouseRetention as Retention + from ee.clickhouse.queries.retention.retention import ( + ClickhouseRetention as Retention, + ) else: from posthog.queries.retention.retention import Retention # type: ignore diff --git a/posthog/queries/retention/actors_query.py b/posthog/queries/retention/actors_query.py index ef31f6fd8c473..5a49c510a3240 100644 --- a/posthog/queries/retention/actors_query.py +++ b/posthog/queries/retention/actors_query.py @@ -99,7 +99,10 @@ def build_actor_activity_query( aggregate_users_by_distinct_id: Optional[bool] = None, retention_events_query=RetentionEventsQuery, ) -> Tuple[str, Dict[str, Any]]: - from posthog.queries.retention import build_returning_event_query, build_target_event_query + from posthog.queries.retention import ( + build_returning_event_query, + build_target_event_query, + ) """ The retention actor query is used to retrieve something of the form: @@ -134,7 +137,8 @@ def build_actor_activity_query( } query = RETENTION_BREAKDOWN_ACTOR_SQL.format( - returning_event_query=returning_event_query, target_event_query=target_event_query + returning_event_query=returning_event_query, + target_event_query=target_event_query, ) return query, all_params @@ -147,7 +151,6 @@ def _build_actor_query( selected_interval: Optional[int] = None, retention_events_query=RetentionEventsQuery, ) -> Tuple[str, Dict[str, Any]]: - actor_activity_query, actor_activity_query_params = build_actor_activity_query( filter=filter, 
team=team, @@ -157,7 +160,11 @@ def _build_actor_query( retention_events_query=retention_events_query, ) - params = {"offset": filter.offset, "limit": filter.limit or 100, **actor_activity_query_params} + params = { + "offset": filter.offset, + "limit": filter.limit or 100, + **actor_activity_query_params, + } actor_query_template = """ SELECT actor_id, diff --git a/posthog/queries/retention/retention.py b/posthog/queries/retention/retention.py index 145ee1404c37b..24cbe95376e93 100644 --- a/posthog/queries/retention/retention.py +++ b/posthog/queries/retention/retention.py @@ -6,7 +6,10 @@ from posthog.models.filters.retention_filter import RetentionFilter from posthog.models.team import Team from posthog.queries.insight import insight_sync_execute -from posthog.queries.retention.actors_query import RetentionActorsByPeriod, build_actor_activity_query +from posthog.queries.retention.actors_query import ( + RetentionActorsByPeriod, + build_actor_activity_query, +) from posthog.queries.retention.retention_events_query import RetentionEventsQuery from posthog.queries.retention.sql import RETENTION_BREAKDOWN_SQL from posthog.queries.retention.types import BreakdownValues, CohortKey @@ -49,7 +52,9 @@ def _get_retention_by_breakdown_values( "count": correct_result_for_sampling(count, filter.sampling_factor), "people": [], "people_url": self._construct_people_url_for_trend_breakdown_interval( - filter=filter, breakdown_values=breakdown_values, selected_interval=intervals_from_base + filter=filter, + breakdown_values=breakdown_values, + selected_interval=intervals_from_base, ), } for (breakdown_values, intervals_from_base, count) in result @@ -58,10 +63,17 @@ def _get_retention_by_breakdown_values( return result_dict def _construct_people_url_for_trend_breakdown_interval( - self, filter: RetentionFilter, selected_interval: int, breakdown_values: BreakdownValues + self, + filter: RetentionFilter, + selected_interval: int, + breakdown_values: BreakdownValues, ): params = 
RetentionFilter( - {**filter._data, "breakdown_values": breakdown_values, "selected_interval": selected_interval}, + { + **filter._data, + "breakdown_values": breakdown_values, + "selected_interval": selected_interval, + }, ).to_params() return f"{self._base_uri}api/person/retention/?{urlencode(params)}" @@ -69,7 +81,10 @@ def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, An result = [ { "values": [ - resultset.get(CohortKey(breakdown_values, interval), {"count": 0, "people": []}) + resultset.get( + CohortKey(breakdown_values, interval), + {"count": 0, "people": []}, + ) for interval in range(filter.total_intervals) ], "label": "::".join(map(str, breakdown_values)), @@ -84,7 +99,12 @@ def process_breakdown_table_result(self, resultset: Dict[CohortKey, Dict[str, An return result - def process_table_result(self, resultset: Dict[CohortKey, Dict[str, Any]], filter: RetentionFilter, team: Team): + def process_table_result( + self, + resultset: Dict[CohortKey, Dict[str, Any]], + filter: RetentionFilter, + team: Team, + ): """ Constructs a response for the rest api when there is no breakdown specified @@ -96,7 +116,11 @@ def process_table_result(self, resultset: Dict[CohortKey, Dict[str, Any]], filte def construct_url(first_day): params = RetentionFilter( - {**filter._data, "display": "ActionsTable", "breakdown_values": [first_day]}, + { + **filter._data, + "display": "ActionsTable", + "breakdown_values": [first_day], + }, ).to_params() return "/api/person/retention/?" 
f"{urlencode(params)}" diff --git a/posthog/queries/retention/retention_events_query.py b/posthog/queries/retention/retention_events_query.py index a3adba8b7bda6..609f66387e865 100644 --- a/posthog/queries/retention/retention_events_query.py +++ b/posthog/queries/retention/retention_events_query.py @@ -38,7 +38,6 @@ def __init__( ) def get_query(self) -> Tuple[str, Dict[str, Any]]: - _fields = [ self.get_timestamp_field(), self.target_field(), @@ -176,7 +175,9 @@ def target_field(self) -> str: def get_timestamp_field(self) -> str: start_of_inteval_sql = get_start_of_interval_sql( - self._filter.period, source=f"{self.EVENT_TABLE_ALIAS}.timestamp", team=self._team + self._filter.period, + source=f"{self.EVENT_TABLE_ALIAS}.timestamp", + team=self._team, ) if self._event_query_type == RetentionQueryType.TARGET: return f"DISTINCT {start_of_inteval_sql} AS event_date" diff --git a/posthog/queries/stickiness/__init__.py b/posthog/queries/stickiness/__init__.py index 6e3acf68fdaa8..421459fd7cfdd 100644 --- a/posthog/queries/stickiness/__init__.py +++ b/posthog/queries/stickiness/__init__.py @@ -2,7 +2,9 @@ if EE_AVAILABLE: from ee.clickhouse.queries.stickiness import ClickhouseStickiness as Stickiness - from ee.clickhouse.queries.stickiness import ClickhouseStickinessActors as StickinessActors + from ee.clickhouse.queries.stickiness import ( + ClickhouseStickinessActors as StickinessActors, + ) else: from posthog.queries.stickiness.stickiness import Stickiness # type: ignore from posthog.queries.stickiness.stickiness_actors import StickinessActors # type: ignore diff --git a/posthog/queries/stickiness/stickiness.py b/posthog/queries/stickiness/stickiness.py index 2a43419be00e1..08bea51c8b042 100644 --- a/posthog/queries/stickiness/stickiness.py +++ b/posthog/queries/stickiness/stickiness.py @@ -20,7 +20,6 @@ class Stickiness: actor_query_class = StickinessActors def run(self, filter: StickinessFilter, team: Team, *args, **kwargs) -> List[Dict[str, Any]]: - response = [] 
for entity in filter.entities: if entity.type == TREND_FILTER_TYPE_ACTIONS and entity.id is not None: @@ -44,14 +43,26 @@ def stickiness(self, entity: Entity, filter: StickinessFilter, team: Team) -> Di counts = insight_sync_execute( query, - {**event_params, **filter.hogql_context.values, "num_intervals": filter.total_intervals}, + { + **event_params, + **filter.hogql_context.values, + "num_intervals": filter.total_intervals, + }, query_type="stickiness", filter=filter, team_id=team.pk, ) return self.process_result(counts, filter, entity) - def people(self, target_entity: Entity, filter: StickinessFilter, team: Team, request, *args, **kwargs): + def people( + self, + target_entity: Entity, + filter: StickinessFilter, + team: Team, + request, + *args, + **kwargs, + ): _, serialized_actors, _ = self.actor_query_class(entity=target_entity, filter=filter, team=team).get_actors() return serialized_actors diff --git a/posthog/queries/stickiness/stickiness_event_query.py b/posthog/queries/stickiness/stickiness_event_query.py index df7fb280b37b0..0e70af72bb997 100644 --- a/posthog/queries/stickiness/stickiness_event_query.py +++ b/posthog/queries/stickiness/stickiness_event_query.py @@ -20,7 +20,6 @@ def __init__(self, entity: Entity, *args, **kwargs): super().__init__(*args, **kwargs) def get_query(self) -> Tuple[str, Dict[str, Any]]: - prop_query, prop_params = self._get_prop_groups( self._filter.property_groups.combine_property_group(PropertyOperatorType.AND, self._entity.property_groups), person_properties_mode=get_person_properties_mode(self._team), diff --git a/posthog/queries/test/test_base.py b/posthog/queries/test/test_base.py index 0710babe19525..bccc9ca60a53e 100644 --- a/posthog/queries/test/test_base.py +++ b/posthog/queries/test/test_base.py @@ -21,7 +21,10 @@ def test_determine_compared_filter(self): self.assertIsInstance(compared_filter, PathFilter) self.assertDictContainsSubset( - {"date_from": "2020-05-16T00:00:00+00:00", "date_to": 
"2020-05-22T23:59:59.999999+00:00"}, + { + "date_from": "2020-05-16T00:00:00+00:00", + "date_to": "2020-05-22T23:59:59.999999+00:00", + }, compared_filter.to_dict(), ) @@ -186,7 +189,8 @@ def test_match_property_date_operators(self): self.assertTrue(match_property(property_a, {"key": datetime.datetime(2022, 4, 30, 1, 2, 3)})) self.assertTrue( match_property( - property_a, {"key": datetime.datetime(2022, 4, 30, 1, 2, 3, tzinfo=tz.gettz("Europe/Madrid"))} + property_a, + {"key": datetime.datetime(2022, 4, 30, 1, 2, 3, tzinfo=tz.gettz("Europe/Madrid"))}, ) ) self.assertTrue(match_property(property_a, {"key": parser.parse("2022-04-30")})) diff --git a/posthog/queries/test/test_lifecycle.py b/posthog/queries/test/test_lifecycle.py index bafbf6bbdff5e..6bb34bfd7d143 100644 --- a/posthog/queries/test/test_lifecycle.py +++ b/posthog/queries/test/test_lifecycle.py @@ -34,7 +34,10 @@ def _create_events(self, data, event="$pageview"): _create_person( team_id=self.team.pk, distinct_ids=[id], - properties={"name": id, **({"email": "test@posthog.com"} if id == "p1" else {})}, + properties={ + "name": id, + **({"email": "test@posthog.com"} if id == "p1" else {}), + }, ) ) for timestamp in timestamps: @@ -241,14 +244,34 @@ def test_lifecycle_trend_prop_filtering(self): ) _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "p2"}) - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T12:00:00Z") - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-12T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-09T12:00:00Z", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-12T12:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "p3"}) - _create_event(team=self.team, event="$pageview", distinct_id="p3", timestamp="2020-01-12T12:00:00Z") + 
_create_event( + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp="2020-01-12T12:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "p4"}) - _create_event(team=self.team, event="$pageview", distinct_id="p4", timestamp="2020-01-15T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p4", + timestamp="2020-01-15T12:00:00Z", + ) result = Trends().run( Filter( @@ -352,14 +375,34 @@ def test_lifecycle_trend_person_prop_filtering(self): ) _create_person(team_id=self.team.pk, distinct_ids=["p2"], properties={"name": "p2"}) - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-09T12:00:00Z") - _create_event(team=self.team, event="$pageview", distinct_id="p2", timestamp="2020-01-12T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-09T12:00:00Z", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p2", + timestamp="2020-01-12T12:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["p3"], properties={"name": "p3"}) - _create_event(team=self.team, event="$pageview", distinct_id="p3", timestamp="2020-01-12T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p3", + timestamp="2020-01-12T12:00:00Z", + ) _create_person(team_id=self.team.pk, distinct_ids=["p4"], properties={"name": "p4"}) - _create_event(team=self.team, event="$pageview", distinct_id="p4", timestamp="2020-01-15T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p4", + timestamp="2020-01-15T12:00:00Z", + ) result = Trends().run( Filter( @@ -392,15 +435,44 @@ def test_lifecycle_trend_person_prop_filtering(self): def test_lifecycle_trends_distinct_id_repeat(self): with freeze_time("2020-01-12T12:00:00Z"): - _create_person(team_id=self.team.pk, distinct_ids=["p1", "another_p1"], properties={"name": "p1"}) + _create_person( + 
team_id=self.team.pk, + distinct_ids=["p1", "another_p1"], + properties={"name": "p1"}, + ) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-12T12:00:00Z") - _create_event(team=self.team, event="$pageview", distinct_id="another_p1", timestamp="2020-01-14T12:00:00Z") - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-15T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-12T12:00:00Z", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="another_p1", + timestamp="2020-01-14T12:00:00Z", + ) + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-15T12:00:00Z", + ) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-17T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-17T12:00:00Z", + ) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-19T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-19T12:00:00Z", + ) result = Trends().run( Filter( @@ -495,7 +567,10 @@ def test_lifecycle_trend_people_paginated(self): person_id = "person{}".format(i) _create_person(team_id=self.team.pk, distinct_ids=[person_id]) _create_event( - team=self.team, event="$pageview", distinct_id=person_id, timestamp="2020-01-15T12:00:00Z" + team=self.team, + event="$pageview", + distinct_id=person_id, + timestamp="2020-01-15T12:00:00Z", ) # even if set to hour 6 it should default to beginning of day and include all pageviews above result = self.client.get( @@ -635,7 +710,15 @@ def test_lifecycle_trend_weeks(self): ) self.assertEqual( - result[0]["days"], ["2020-02-03", "2020-02-10", "2020-02-17", "2020-02-24", "2020-03-02", "2020-03-09"] + result[0]["days"], + [ + "2020-02-03", + "2020-02-10", + "2020-02-17", + 
"2020-02-24", + "2020-03-02", + "2020-03-09", + ], ) assertLifecycleResults( @@ -812,7 +895,10 @@ def test_timezones(self): assertLifecycleResults( result_pacific, [ - {"status": "dormant", "data": [-1.0, -2.0, -1.0, 0.0, -2.0, 0.0, -1.0, 0.0]}, + { + "status": "dormant", + "data": [-1.0, -2.0, -1.0, 0.0, -2.0, 0.0, -1.0, 0.0], + }, {"status": "new", "data": [1, 0, 0, 1, 0, 0, 0, 0]}, {"status": "resurrecting", "data": [1, 1, 0, 1, 0, 1, 0, 1]}, {"status": "returning", "data": [0, 0, 0, 0, 0, 0, 0, 0]}, diff --git a/posthog/queries/test/test_paths.py b/posthog/queries/test/test_paths.py index c7e3df9ced0a9..45f09a9ca5787 100644 --- a/posthog/queries/test/test_paths.py +++ b/posthog/queries/test/test_paths.py @@ -53,7 +53,11 @@ def test_current_url_paths_and_logic(self): ] ) - _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"email": "test@posthog.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person_1"], + properties={"email": "test@posthog.com"}, + ) events.append( _create_event( properties={"$current_url": "/"}, @@ -191,7 +195,10 @@ def test_current_url_paths_and_logic(self): date_from = now() - relativedelta(days=7) date_to = now() + relativedelta(days=7) - date_params = {"date_from": date_from.strftime("%Y-%m-%d"), "date_to": date_to.strftime("%Y-%m-%d")} + date_params = { + "date_from": date_from.strftime("%Y-%m-%d"), + "date_to": date_to.strftime("%Y-%m-%d"), + } filter = PathFilter(team=self.team, data={**date_params}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -204,7 +211,10 @@ def test_current_url_paths_and_logic(self): date_from = now() + relativedelta(days=7) date_to = now() - relativedelta(days=7) - date_params = {"date_from": date_from.strftime("%Y-%m-%d"), "date_to": date_to.strftime("%Y-%m-%d")} + date_params = { + "date_from": date_from.strftime("%Y-%m-%d"), + "date_to": date_to.strftime("%Y-%m-%d"), + } filter = PathFilter(team=self.team, data={**date_params}) 
response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) self.assertEqual(len(response), 0) @@ -215,18 +225,86 @@ def test_custom_event_paths(self): _create_person(team_id=self.team.pk, distinct_ids=["person_3"]) _create_person(team_id=self.team.pk, distinct_ids=["person_4"]) - _create_event(distinct_id="person_1", event="custom_event_1", team=self.team, properties={}), - _create_event(distinct_id="person_1", event="custom_event_3", team=self.team, properties={}), - _create_event( - properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team - ), # should be ignored, - _create_event(distinct_id="person_2", event="custom_event_1", team=self.team, properties={}), - _create_event(distinct_id="person_2", event="custom_event_2", team=self.team, properties={}), - _create_event(distinct_id="person_2", event="custom_event_3", team=self.team, properties={}), - _create_event(distinct_id="person_3", event="custom_event_2", team=self.team, properties={}), - _create_event(distinct_id="person_3", event="custom_event_1", team=self.team, properties={}), - _create_event(distinct_id="person_4", event="custom_event_1", team=self.team, properties={}), - _create_event(distinct_id="person_4", event="custom_event_2", team=self.team, properties={}), + ( + _create_event( + distinct_id="person_1", + event="custom_event_1", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_1", + event="custom_event_3", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) # should be ignored, + ( + _create_event( + distinct_id="person_2", + event="custom_event_1", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_2", + event="custom_event_2", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_2", + event="custom_event_3", + 
team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_3", + event="custom_event_2", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_3", + event="custom_event_1", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_4", + event="custom_event_1", + team=self.team, + properties={}, + ), + ) + ( + _create_event( + distinct_id="person_4", + event="custom_event_2", + team=self.team, + properties={}, + ), + ) filter = PathFilter(team=self.team, data={"path_type": "custom_event"}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -253,21 +331,93 @@ def test_custom_hogql_paths(self): _create_person(team_id=self.team.pk, distinct_ids=["person_3"]) _create_person(team_id=self.team.pk, distinct_ids=["person_4"]) - _create_event(distinct_id="person_1", event="custom_event_1", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_1", event="custom_event_3", team=self.team, properties={"a": "!"}), - _create_event( - properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team - ), # should be ignored, - _create_event(distinct_id="person_2", event="custom_event_1", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_2", event="custom_event_2", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_2", event="custom_event_3", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_3", event="custom_event_2", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_3", event="custom_event_1", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_4", event="custom_event_1", team=self.team, properties={"a": "!"}), - _create_event(distinct_id="person_4", event="custom_event_2", team=self.team, properties={"a": "!"}), + ( + _create_event( + distinct_id="person_1", + event="custom_event_1", + 
team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_1", + event="custom_event_3", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) # should be ignored, + ( + _create_event( + distinct_id="person_2", + event="custom_event_1", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_2", + event="custom_event_2", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_2", + event="custom_event_3", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_3", + event="custom_event_2", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_3", + event="custom_event_1", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_4", + event="custom_event_1", + team=self.team, + properties={"a": "!"}, + ), + ) + ( + _create_event( + distinct_id="person_4", + event="custom_event_2", + team=self.team, + properties={"a": "!"}, + ), + ) filter = PathFilter( - data={"path_type": "hogql", "paths_hogql_expression": "event || properties.a"}, team=self.team + data={ + "path_type": "hogql", + "paths_hogql_expression": "event || properties.a", + }, + team=self.team, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -293,17 +443,78 @@ def test_screen_paths(self): _create_person(team_id=self.team.pk, distinct_ids=["person_3"]) _create_person(team_id=self.team.pk, distinct_ids=["person_4"]) - _create_event(properties={"$screen_name": "/"}, distinct_id="person_1", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/about"}, distinct_id="person_1", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/"}, distinct_id="person_2b", 
event="$screen", team=self.team), - _create_event( - properties={"$screen_name": "/pricing"}, distinct_id="person_2a", event="$screen", team=self.team - ), - _create_event(properties={"$screen_name": "/about"}, distinct_id="person_2b", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/pricing"}, distinct_id="person_3", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/"}, distinct_id="person_3", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/"}, distinct_id="person_4", event="$screen", team=self.team), - _create_event(properties={"$screen_name": "/pricing"}, distinct_id="person_4", event="$screen", team=self.team), + ( + _create_event( + properties={"$screen_name": "/"}, + distinct_id="person_1", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/about"}, + distinct_id="person_1", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/"}, + distinct_id="person_2b", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/pricing"}, + distinct_id="person_2a", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/about"}, + distinct_id="person_2b", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/pricing"}, + distinct_id="person_3", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/"}, + distinct_id="person_3", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/"}, + distinct_id="person_4", + event="$screen", + team=self.team, + ), + ) + ( + _create_event( + properties={"$screen_name": "/pricing"}, + distinct_id="person_4", + event="$screen", + team=self.team, + ), + ) filter = PathFilter(team=self.team, data={"path_type": "$screen"}) response = 
Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -329,47 +540,82 @@ def test_paths_properties_filter(self): _create_person(team_id=self.team.pk, distinct_ids=["person_3"]) _create_person(team_id=self.team.pk, distinct_ids=["person_4"]) - _create_event( - properties={"$current_url": "/", "$browser": "Chrome"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - ), - _create_event( - properties={"$current_url": "/about", "$browser": "Chrome"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - ), - _create_event( - properties={"$current_url": "/", "$browser": "Chrome"}, - distinct_id="person_2", - event="$pageview", - team=self.team, - ), - _create_event( - properties={"$current_url": "/pricing", "$browser": "Chrome"}, - distinct_id="person_2", - event="$pageview", - team=self.team, - ), - _create_event( - properties={"$current_url": "/about", "$browser": "Chrome"}, - distinct_id="person_2", - event="$pageview", - team=self.team, - ), - _create_event( - properties={"$current_url": "/pricing"}, distinct_id="person_3", event="$pageview", team=self.team - ), - _create_event(properties={"$current_url": "/"}, distinct_id="person_3", event="$pageview", team=self.team), - _create_event(properties={"$current_url": "/"}, distinct_id="person_4", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/pricing"}, distinct_id="person_4", event="$pageview", team=self.team - ), + ( + _create_event( + properties={"$current_url": "/", "$browser": "Chrome"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about", "$browser": "Chrome"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/", "$browser": "Chrome"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing", 
"$browser": "Chrome"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about", "$browser": "Chrome"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_4", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing"}, + distinct_id="person_4", + event="$pageview", + team=self.team, + ), + ) filter = PathFilter( - team=self.team, data={"properties": [{"key": "$browser", "value": "Chrome", "type": "event"}]} + team=self.team, + data={"properties": [{"key": "$browser", "value": "Chrome", "type": "event"}]}, ) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) @@ -393,36 +639,118 @@ def test_paths_start(self): _create_person(team_id=self.team.pk, distinct_ids=["person_4"]) _create_person(team_id=self.team.pk, distinct_ids=["person_5a", "person_5b"]) - _create_event(properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/about/"}, distinct_id="person_1", event="$pageview", team=self.team - ), - _create_event(properties={"$current_url": "/"}, distinct_id="person_2", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/pricing/"}, distinct_id="person_2", event="$pageview", team=self.team - ), - _create_event(properties={"$current_url": "/about"}, distinct_id="person_2", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/pricing"}, distinct_id="person_3", event="$pageview", team=self.team - ), - 
_create_event(properties={"$current_url": "/"}, distinct_id="person_3", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/about/"}, distinct_id="person_3", event="$pageview", team=self.team - ), - _create_event(properties={"$current_url": "/"}, distinct_id="person_4", event="$pageview", team=self.team), - _create_event( - properties={"$current_url": "/pricing/"}, distinct_id="person_4", event="$pageview", team=self.team - ), - _create_event( - properties={"$current_url": "/pricing"}, distinct_id="person_5a", event="$pageview", team=self.team - ), - _create_event( - properties={"$current_url": "/about"}, distinct_id="person_5b", event="$pageview", team=self.team - ), - _create_event( - properties={"$current_url": "/pricing/"}, distinct_id="person_5a", event="$pageview", team=self.team - ), - _create_event(properties={"$current_url": "/help"}, distinct_id="person_5b", event="$pageview", team=self.team), + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing/"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_2", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_3", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about/"}, + distinct_id="person_3", + 
event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_4", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing/"}, + distinct_id="person_4", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing"}, + distinct_id="person_5a", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_5b", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/pricing/"}, + distinct_id="person_5a", + event="$pageview", + team=self.team, + ), + ) + ( + _create_event( + properties={"$current_url": "/help"}, + distinct_id="person_5b", + event="$pageview", + team=self.team, + ), + ) response = self.client.get( f"/api/projects/{self.team.id}/insights/path/?type=%24pageview&start=%2Fpricing" @@ -463,34 +791,42 @@ def test_paths_start(self): def test_paths_in_window(self): _create_person(team_id=self.team.pk, distinct_ids=["person_1"]) - _create_event( - properties={"$current_url": "/"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2020-04-14 03:25:34", - ), - _create_event( - properties={"$current_url": "/about"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2020-04-14 03:30:34", - ), - _create_event( - properties={"$current_url": "/"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2020-04-15 03:25:34", - ), - _create_event( - properties={"$current_url": "/about"}, - distinct_id="person_1", - event="$pageview", - team=self.team, - timestamp="2020-04-15 03:30:34", - ), + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2020-04-14 03:25:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + 
distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2020-04-14 03:30:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2020-04-15 03:25:34", + ), + ) + ( + _create_event( + properties={"$current_url": "/about"}, + distinct_id="person_1", + event="$pageview", + team=self.team, + timestamp="2020-04-15 03:30:34", + ), + ) filter = PathFilter(team=self.team, data={"date_from": "2020-04-13"}) response = Paths(team=self.team, filter=filter).run(team=self.team, filter=filter) diff --git a/posthog/queries/test/test_query_date_range.py b/posthog/queries/test/test_query_date_range.py index 15e7944502ba6..ebb21ad90e5ac 100644 --- a/posthog/queries/test/test_query_date_range.py +++ b/posthog/queries/test/test_query_date_range.py @@ -7,7 +7,6 @@ class TestQueryDateRange(APIBaseTest): def test_parsed_date(self): - with freeze_time("2021-08-25T00:00:00.000Z"): filter = Filter( data={ @@ -31,7 +30,6 @@ def test_parsed_date(self): ) def test_parsed_date_hour(self): - with freeze_time("2021-08-25T00:00:00.000Z"): filter = Filter( data={ @@ -55,7 +53,6 @@ def test_parsed_date_hour(self): ) # ensure last hour is included def test_parsed_date_middle_of_hour(self): - with freeze_time("2021-08-25T00:00:00.000Z"): filter = Filter( data={ @@ -80,7 +77,6 @@ def test_parsed_date_middle_of_hour(self): ) # ensure last hour is included def test_parsed_date_week_rounded(self): - with freeze_time("2021-08-25T00:00:00.000Z"): filter = Filter( data={ @@ -104,7 +100,6 @@ def test_parsed_date_week_rounded(self): ) def test_is_hourly(self): - with freeze_time("2021-08-25T00:00:00.000Z"): filter = Filter( data={ diff --git a/posthog/queries/test/test_retention.py b/posthog/queries/test/test_retention.py index 42b7c596b14a9..5b823d462d917 100644 --- a/posthog/queries/test/test_retention.py +++ b/posthog/queries/test/test_retention.py @@ -57,7 +57,13 @@ def _create_events(team, 
user_and_timestamps, event="$pageview"): if len(properties_args) == 1: properties.update(properties_args[0]) - _create_event(team=team, event=event, distinct_id=distinct_id, timestamp=timestamp, properties=properties) + _create_event( + team=team, + event=event, + distinct_id=distinct_id, + timestamp=timestamp, + properties=properties, + ) i += 1 @@ -126,7 +132,19 @@ def test_day_interval(self): self.assertEqual(len(result), 11) self.assertEqual( pluck(result, "label"), - ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], + [ + "Day 0", + "Day 1", + "Day 2", + "Day 3", + "Day 4", + "Day 5", + "Day 6", + "Day 7", + "Day 8", + "Day 9", + "Day 10", + ], ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) @@ -148,8 +166,16 @@ def test_day_interval(self): ) def test_month_interval(self): - _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}) - _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"}) + _create_person( + team=self.team, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, + ) _create_events( self.team, @@ -227,8 +253,16 @@ def test_month_interval(self): @override_settings(PERSON_ON_EVENTS_V2_OVERRIDE=True) @snapshot_clickhouse_queries def test_month_interval_with_person_on_events_v2(self): - _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}) - _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"}) + _create_person( + team=self.team, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, + ) 
person_id1 = str(uuid.uuid4()) person_id2 = str(uuid.uuid4()) @@ -386,8 +420,16 @@ def test_month_interval_with_person_on_events_v2(self): ) def test_week_interval(self): - _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}) - _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"}) + _create_person( + team=self.team, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, + ) _create_events( self.team, @@ -408,17 +450,32 @@ def test_week_interval(self): ) result = retention().run( - RetentionFilter(data={"date_to": _date(10, month=1, hour=0), "period": "Week", "total_intervals": 7}), + RetentionFilter( + data={ + "date_to": _date(10, month=1, hour=0), + "period": "Week", + "total_intervals": 7, + } + ), self.team, ) self.assertEqual( - pluck(result, "label"), ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"] + pluck(result, "label"), + ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"], ) self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) self.assertEqual( @@ -435,8 +492,16 @@ def test_week_interval(self): ) def test_hour_interval(self): - _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}) - _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"}) + _create_person( + team=self.team, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["person2"], + properties={"email": 
"person2@test.com"}, + ) _create_events( self.team, @@ -513,8 +578,16 @@ def test_hour_interval(self): # ensure that the first interval is properly rounded acoording to the specified period def test_interval_rounding(self): - _create_person(team=self.team, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"}) - _create_person(team=self.team, distinct_ids=["person2"], properties={"email": "person2@test.com"}) + _create_person( + team=self.team, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team=self.team, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, + ) _create_events( self.team, @@ -535,17 +608,32 @@ def test_interval_rounding(self): ) result = retention().run( - RetentionFilter(data={"date_to": _date(14, month=1, hour=0), "period": "Week", "total_intervals": 7}), + RetentionFilter( + data={ + "date_to": _date(14, month=1, hour=0), + "period": "Week", + "total_intervals": 7, + } + ), self.team, ) self.assertEqual( - pluck(result, "label"), ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"] + pluck(result, "label"), + ["Week 0", "Week 1", "Week 2", "Week 3", "Week 4", "Week 5", "Week 6"], ) self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 1, 2, 2, 0, 1], [2, 1, 2, 2, 0, 1], [1, 1, 1, 0, 0], [2, 2, 0, 1], [2, 0, 1], [0, 0], [1]], + [ + [2, 2, 1, 2, 2, 0, 1], + [2, 1, 2, 2, 0, 1], + [1, 1, 1, 0, 0], + [2, 2, 0, 1], + [2, 0, 1], + [0, 0], + [1], + ], ) self.assertEqual( @@ -583,7 +671,11 @@ def test_retention_people_basic(self): # even if set to hour 6 it should default to beginning of day and include all pageviews above result, _ = retention().actors_in_period( - RetentionFilter(data={"date_to": _date(10, hour=6), "selected_interval": 0}, team=self.team), self.team + RetentionFilter( + data={"date_to": _date(10, hour=6), "selected_interval": 0}, + team=self.team, + ), + self.team, ) self.assertEqual(len(result), 1) 
self.assertTrue(result[0]["person"]["id"] == person1.uuid, person1.uuid) @@ -632,12 +724,18 @@ def test_retention_people_paginated(self): _create_person(team_id=self.team.pk, distinct_ids=[person_id]) _create_events( self.team, - [(person_id, _date(0)), (person_id, _date(1)), (person_id, _date(2)), (person_id, _date(5))], + [ + (person_id, _date(0)), + (person_id, _date(1)), + (person_id, _date(2)), + (person_id, _date(5)), + ], ) # even if set to hour 6 it should default to beginning of day and include all pageviews above result = self.client.get( - "/api/person/retention", data={"date_to": _date(10, hour=6), "selected_interval": 2} + "/api/person/retention", + data={"date_to": _date(10, hour=6), "selected_interval": 2}, ).json() self.assertEqual(len(result["result"]), 100) @@ -650,7 +748,8 @@ def test_retention_invalid_properties(self): self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertDictEqual( - response.json(), self.validation_error_response("Properties are unparsable!", "invalid_input") + response.json(), + self.validation_error_response("Properties are unparsable!", "invalid_input"), ) def test_retention_people_in_period(self): @@ -676,7 +775,11 @@ def test_retention_people_in_period(self): # even if set to hour 6 it should default to beginning of day and include all pageviews above result, _ = retention().actors_in_period( - RetentionFilter(data={"date_to": _date(10, hour=6), "selected_interval": 2}, team=self.team), self.team + RetentionFilter( + data={"date_to": _date(10, hour=6), "selected_interval": 2}, + team=self.team, + ), + self.team, ) # should be descending order on number of appearances @@ -732,7 +835,9 @@ def test_retention_multiple_events(self): ) _create_events( - self.team, [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], "$pageview" + self.team, + [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], + "$pageview", ) target_entity = json.dumps({"id": first_event, "type": 
TREND_FILTER_TYPE_EVENTS}) @@ -748,11 +853,22 @@ def test_retention_multiple_events(self): self.team, ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual( pluck(result, "values", "count"), - [[2, 0, 0, 0, 0, 2, 1], [2, 0, 0, 0, 2, 1], [2, 0, 0, 2, 1], [2, 0, 2, 1], [0, 0, 0], [1, 0], [0]], + [ + [2, 0, 0, 0, 0, 2, 1], + [2, 0, 0, 0, 2, 1], + [2, 0, 0, 2, 1], + [2, 0, 2, 1], + [0, 0, 0], + [1, 0], + [0], + ], ) def test_retention_any_event(self): @@ -778,7 +894,9 @@ def test_retention_any_event(self): ) _create_events( - self.team, [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], "$pageview" + self.team, + [("person1", _date(5)), ("person1", _date(6)), ("person2", _date(5))], + "$pageview", ) result = retention().run( @@ -793,11 +911,22 @@ def test_retention_any_event(self): self.team, ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual( pluck(result, "values", "count"), - [[2, 2, 2, 2, 0, 2, 1], [2, 2, 2, 0, 2, 1], [2, 2, 0, 2, 1], [2, 0, 2, 1], [0, 0, 0], [3, 1], [1]], + [ + [2, 2, 2, 2, 0, 2, 1], + [2, 2, 2, 0, 2, 1], + [2, 2, 0, 2, 1], + [2, 0, 2, 1], + [0, 0, 0], + [3, 1], + [1], + ], ) @snapshot_clickhouse_queries @@ -828,7 +957,10 @@ def test_retention_event_action(self): data={ "date_to": _date(6, hour=0), "target_entity": start_entity, - "returning_entity": {"id": some_event, "type": TREND_FILTER_TYPE_EVENTS}, + "returning_entity": { + "id": some_event, + "type": TREND_FILTER_TYPE_EVENTS, + }, "total_intervals": 7, } ), @@ -836,12 +968,23 @@ def test_retention_event_action(self): ) 
self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), - [[2, 0, 0, 1, 0, 1, 0], [2, 0, 1, 0, 1, 0], [2, 1, 0, 1, 0], [2, 0, 1, 0], [0, 0, 0], [0, 0], [0]], + [ + [2, 0, 0, 1, 0, 1, 0], + [2, 0, 1, 0, 1, 0], + [2, 1, 0, 1, 0], + [2, 0, 1, 0], + [0, 0, 0], + [0, 0], + [0], + ], ) def test_first_time_retention(self): @@ -862,11 +1005,22 @@ def test_first_time_retention(self): ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual( pluck(result, "values", "count"), - [[2, 1, 2, 2, 1, 0, 1], [1, 1, 0, 1, 1, 1], [0, 0, 0, 0, 0], [1, 1, 0, 1], [0, 0, 0], [0, 0], [0]], + [ + [2, 1, 2, 2, 1, 0, 1], + [1, 1, 0, 1, 1, 1], + [0, 0, 0, 0, 0], + [1, 1, 0, 1], + [0, 0, 0], + [0, 0], + [0], + ], ) def test_retention_with_properties(self): @@ -891,14 +1045,29 @@ def test_retention_with_properties(self): result = retention().run( RetentionFilter( - data={"properties": [{"key": "$some_property", "value": "value"}], "date_to": _date(10, hour=0)} + data={ + "properties": [{"key": "$some_property", "value": "value"}], + "date_to": _date(10, hour=0), + } ), self.team, ) self.assertEqual(len(result), 11) self.assertEqual( pluck(result, "label"), - ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], + [ + "Day 0", + "Day 1", + "Day 2", + "Day 3", + "Day 4", + "Day 5", + "Day 6", + "Day 7", + "Day 8", + "Day 9", + "Day 10", + ], ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 
0, tzinfo=ZoneInfo("UTC"))) @@ -921,9 +1090,15 @@ def test_retention_with_properties(self): def test_retention_with_user_properties(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"} + team_id=self.team.pk, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "person2@test.com"}) _create_events( self.team, @@ -944,7 +1119,13 @@ def test_retention_with_user_properties(self): result = retention().run( RetentionFilter( data={ - "properties": [{"key": "email", "value": "person1@test.com", "type": "person"}], + "properties": [ + { + "key": "email", + "value": "person1@test.com", + "type": "person", + } + ], "date_to": _date(6, hour=0), "total_intervals": 7, } @@ -953,11 +1134,22 @@ def test_retention_with_user_properties(self): ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]], + [ + [1, 1, 1, 0, 0, 1, 1], + [1, 1, 0, 0, 1, 1], + [1, 0, 0, 1, 1], + [0, 0, 0, 0], + [0, 0, 0], + [1, 1], + [1], + ], ) @snapshot_clickhouse_queries @@ -970,9 +1162,15 @@ def test_retention_with_user_properties_via_action(self): ) _create_person( - team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "person1@test.com"} + team_id=self.team.pk, + distinct_ids=["person1", "alias1"], + properties={"email": "person1@test.com"}, + 
) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "person2@test.com"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "person2@test.com"}) _create_events( self.team, @@ -1003,11 +1201,22 @@ def test_retention_with_user_properties_via_action(self): ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 1, 0, 0, 1, 1], [1, 1, 0, 0, 1, 1], [1, 0, 0, 1, 1], [0, 0, 0, 0], [0, 0, 0], [1, 1], [1]], + [ + [1, 1, 1, 0, 0, 1, 1], + [1, 1, 0, 0, 1, 1], + [1, 0, 0, 1, 1], + [0, 0, 0, 0], + [0, 0, 0], + [1, 1], + [1], + ], ) def test_retention_action_start_point(self): @@ -1044,17 +1253,30 @@ def test_retention_action_start_point(self): ) self.assertEqual(len(result), 7) - self.assertEqual(pluck(result, "label"), ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"]) + self.assertEqual( + pluck(result, "label"), + ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6"], + ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) self.assertEqual( pluck(result, "values", "count"), - [[1, 1, 1, 0, 0, 1, 1], [2, 2, 1, 0, 1, 2], [2, 1, 0, 1, 2], [1, 0, 0, 1], [0, 0, 0], [1, 1], [2]], + [ + [1, 1, 1, 0, 0, 1, 1], + [2, 2, 1, 0, 1, 2], + [2, 1, 0, 1, 2], + [1, 0, 0, 1], + [0, 0, 0], + [1, 1], + [2], + ], ) def test_filter_test_accounts(self): _create_person( - team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"email": "test@posthog.com"} + team_id=self.team.pk, + distinct_ids=["person1", "alias1"], + properties={"email": "test@posthog.com"}, ) _create_person(team_id=self.team.pk, 
distinct_ids=["person2"]) @@ -1076,13 +1298,28 @@ def test_filter_test_accounts(self): # even if set to hour 6 it should default to beginning of day and include all pageviews above result = retention().run( - RetentionFilter(data={"date_to": _date(10, hour=6), FILTER_TEST_ACCOUNTS: True}, team=self.team), + RetentionFilter( + data={"date_to": _date(10, hour=6), FILTER_TEST_ACCOUNTS: True}, + team=self.team, + ), self.team, ) self.assertEqual(len(result), 11) self.assertEqual( pluck(result, "label"), - ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], + [ + "Day 0", + "Day 1", + "Day 2", + "Day 3", + "Day 4", + "Day 5", + "Day 6", + "Day 7", + "Day 8", + "Day 9", + "Day 10", + ], ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) @@ -1144,7 +1381,13 @@ def _create_first_time_retention_events(self): _create_events(self.team, [("person3", _date(0))], "$user_signed_up") _create_events( - self.team, [("person3", _date(1)), ("person3", _date(3)), ("person3", _date(4)), ("person3", _date(5))] + self.team, + [ + ("person3", _date(1)), + ("person3", _date(3)), + ("person3", _date(4)), + ("person3", _date(5)), + ], ) _create_events(self.team, [("person4", _date(2))], "$user_signed_up") @@ -1154,7 +1397,11 @@ def _create_first_time_retention_events(self): return p1, p2, p3, p4 def test_retention_aggregate_by_distinct_id(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1", "alias1"], properties={"test": "ok"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1", "alias1"], + properties={"test": "ok"}, + ) _create_person(team_id=self.team.pk, distinct_ids=["person2"]) _create_events( @@ -1203,7 +1450,14 @@ def test_retention_aggregate_by_distinct_id(self): [2, 1, 0, 1, 2, 0, 0, 0, 0], [1, 0, 0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0], - [2, 1, 0, 0, 0, 0], # this first day is different b/c of the distinct_id aggregation + [ + 2, + 1, + 0, + 0, + 0, + 0, + ], 
# this first day is different b/c of the distinct_id aggregation [2, 0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0], @@ -1229,7 +1483,14 @@ def test_retention_aggregate_by_distinct_id(self): [1, 0, 0, 1, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0], - [2, 1, 0, 0, 0, 0], # this first day is different b/c of the distinct_id aggregation + [ + 2, + 1, + 0, + 0, + 0, + 0, + ], # this first day is different b/c of the distinct_id aggregation [1, 0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0], @@ -1248,26 +1509,48 @@ def test_timezones(self): [ ("person1", _date(-1, 1)), ("person1", _date(0, 1)), - ("person1", _date(1, 1)), # this is the only event in US Pacific on the first day + ( + "person1", + _date(1, 1), + ), # this is the only event in US Pacific on the first day ("person2", _date(6, 1)), ("person2", _date(6, 9)), ], ) - result = retention().run(RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), self.team) + result = retention().run( + RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), + self.team, + ) self.team.timezone = "US/Pacific" self.team.save() result_pacific = retention().run( - RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), self.team + RetentionFilter(data={"date_to": _date(10, hour=6)}, team=self.team), + self.team, ) self.assertEqual( pluck(result_pacific, "label"), - ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], + [ + "Day 0", + "Day 1", + "Day 2", + "Day 3", + "Day 4", + "Day 5", + "Day 6", + "Day 7", + "Day 8", + "Day 9", + "Day 10", + ], ) - self.assertEqual(result_pacific[0]["date"], datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific"))) + self.assertEqual( + result_pacific[0]["date"], + datetime(2020, 6, 10, tzinfo=ZoneInfo("US/Pacific")), + ) self.assertEqual(result_pacific[0]["date"].isoformat(), "2020-06-10T00:00:00-07:00") self.assertEqual( @@ -1327,12 +1610,25 @@ def test_day_interval_sampled(self): # even if set to hour 6 it 
should default to beginning of day and include all pageviews above result = retention().run( - RetentionFilter(data={"date_to": _date(10, hour=6), "sampling_factor": 1}), self.team + RetentionFilter(data={"date_to": _date(10, hour=6), "sampling_factor": 1}), + self.team, ) self.assertEqual(len(result), 11) self.assertEqual( pluck(result, "label"), - ["Day 0", "Day 1", "Day 2", "Day 3", "Day 4", "Day 5", "Day 6", "Day 7", "Day 8", "Day 9", "Day 10"], + [ + "Day 0", + "Day 1", + "Day 2", + "Day 3", + "Day 4", + "Day 5", + "Day 6", + "Day 7", + "Day 8", + "Day 9", + "Day 10", + ], ) self.assertEqual(result[0]["date"], datetime(2020, 6, 10, 0, tzinfo=ZoneInfo("UTC"))) diff --git a/posthog/queries/test/test_trends.py b/posthog/queries/test/test_trends.py index 3cce0cfd1907a..63b7024d3d6bf 100644 --- a/posthog/queries/test/test_trends.py +++ b/posthog/queries/test/test_trends.py @@ -32,7 +32,11 @@ Person, ) from posthog.models.group.util import create_group -from posthog.models.instance_setting import get_instance_setting, override_instance_config, set_instance_setting +from posthog.models.instance_setting import ( + get_instance_setting, + override_instance_config, + set_instance_setting, +) from posthog.models.person.util import create_person_distinct_id from posthog.queries.trends.trends import Trends from posthog.test.base import ( @@ -54,7 +58,7 @@ def breakdown_label(entity: Entity, value: Union[str, int]) -> Dict[str, Optional[Union[str, int]]]: ret_dict: Dict[str, Optional[Union[str, int]]] = {} if not value or not isinstance(value, str) or "cohort_" not in value: - label = value if (value or type(value) == bool) and value != "None" and value != "nan" else "Other" + label = value if (value or isinstance(value, bool)) and value != "None" and value != "nan" else "Other" ret_dict["label"] = f"{entity.name} - {label}" ret_dict["breakdown_value"] = label else: @@ -104,14 +108,19 @@ def _get_trend_people(self, filter: Filter, entity: Entity): return 
response["results"][0]["people"] def _create_events(self, use_time=False) -> Tuple[Action, Person]: - person = _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) _, _, secondTeam = Organization.objects.bootstrap(None, team_fields={"api_token": "token456"}) freeze_without_time = ["2019-12-24", "2020-01-01", "2020-01-02"] - freeze_with_time = ["2019-12-24 03:45:34", "2020-01-01 00:06:34", "2020-01-02 16:34:34"] + freeze_with_time = [ + "2019-12-24 03:45:34", + "2020-01-01 00:06:34", + "2020-01-02 16:34:34", + ] freeze_args = freeze_without_time if use_time: @@ -132,20 +141,31 @@ def _create_events(self, use_time=False) -> Tuple[Action, Person]: distinct_id="blabla", properties={"$some_property": "value", "$bool_prop": False}, ) - _create_event(team=self.team, event="sign up", distinct_id="anonymous_id", properties={"$bool_prop": False}) + _create_event( + team=self.team, + event="sign up", + distinct_id="anonymous_id", + properties={"$bool_prop": False}, + ) _create_event(team=self.team, event="sign up", distinct_id="blabla") with freeze_time(freeze_args[2]): _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$some_property": "other_value", "$some_numerical_prop": 80}, + properties={ + "$some_property": "other_value", + "$some_numerical_prop": 80, + }, ) _create_event(team=self.team, event="no events", distinct_id="blabla") # second team should have no effect _create_event( - team=secondTeam, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"} + team=secondTeam, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "other_value"}, ) _create_action(team=self.team, name="no events") @@ -160,14 +180,27 @@ def _create_breakdown_events(self): with freeze_time(freeze_without_time[0]): for i in range(25): - 
_create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": i}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": i}, + ) _create_action(team=self.team, name="sign up") def _create_event_count_per_actor_events(self): - _create_person(team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"fruit": "mango"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"fruit": "mango"}, + ) _create_person(team_id=self.team.pk, distinct_ids=["tintin"], properties={"fruit": "mango"}) _create_person(team_id=self.team.pk, distinct_ids=["murmur"], properties={}) # No fruit here - _create_person(team_id=self.team.pk, distinct_ids=["reeree"], properties={"fruit": "tomato"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["reeree"], + properties={"fruit": "tomato"}, + ) with freeze_time("2020-01-01 00:06:02"): _create_event( @@ -177,7 +210,10 @@ def _create_event_count_per_actor_events(self): properties={"color": "red", "$group_0": "bouba"}, ) _create_event( - team=self.team, event="viewed video", distinct_id="blabla", properties={"$group_0": "bouba"} + team=self.team, + event="viewed video", + distinct_id="blabla", + properties={"$group_0": "bouba"}, ) # No color here _create_event( team=self.team, @@ -185,10 +221,20 @@ def _create_event_count_per_actor_events(self): distinct_id="reeree", properties={"color": "blue", "$group_0": "bouba"}, ) - _create_event(team=self.team, event="sign up", distinct_id="tintin", properties={"$group_0": "kiki"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="tintin", + properties={"$group_0": "kiki"}, + ) with freeze_time("2020-01-03 19:06:34"): - _create_event(team=self.team, event="sign up", distinct_id="murmur", properties={"$group_0": "kiki"}) + _create_event( + team=self.team, + event="sign up", + distinct_id="murmur", + properties={"$group_0": "kiki"}, 
+ ) with freeze_time("2020-01-04 23:17:00"): _create_event( @@ -206,7 +252,10 @@ def _create_event_count_per_actor_events(self): properties={"color": "blue", "$group_0": "bouba"}, ) _create_event( - team=self.team, event="viewed video", distinct_id="tintin", properties={"color": "red"} + team=self.team, + event="viewed video", + distinct_id="tintin", + properties={"color": "red"}, ) # No group here _create_event( team=self.team, @@ -226,7 +275,13 @@ def test_trends_per_day(self): with freeze_time("2020-01-04T13:00:01Z"): # with self.assertNumQueries(16): response = Trends().run( - Filter(team=self.team, data={"date_from": "-7d", "events": [{"id": "sign up"}, {"id": "no events"}]}), + Filter( + team=self.team, + data={ + "date_from": "-7d", + "events": [{"id": "sign up"}, {"id": "no events"}], + }, + ), self.team, ) self.assertEqual(response[0]["label"], "sign up") @@ -272,7 +327,11 @@ def test_trend_actors_person_on_events_pagination_with_alias_inconsistencies(sel ) flush_persons_and_events() - data = {"date_from": "-7d", "events": [{"id": "sign up", "math": "dau"}], "limit": 5} + data = { + "date_from": "-7d", + "events": [{"id": "sign up", "math": "dau"}], + "limit": 5, + } with override_instance_config("PERSON_ON_EVENTS_ENABLED", True): from posthog.models.team import util @@ -318,7 +377,12 @@ def test_no_props(self): "date_from": "-14d", "breakdown": "$some_property", "events": [ - {"id": "sign up", "name": "sign up", "type": "events", "order": 0}, + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + }, {"id": "no events"}, ], }, @@ -348,7 +412,6 @@ def test_trends_per_day_48hours(self): def test_trends_per_day_cumulative(self): self._create_events() with freeze_time("2020-01-04T13:00:01Z"): - response = Trends().run( Filter( team=self.team, @@ -371,7 +434,6 @@ def test_trends_per_day_cumulative(self): def test_trends_groups_per_day_cumulative(self): self._create_event_count_per_actor_events() with freeze_time("2020-01-06T13:00:01Z"): 
- response = Trends().run( Filter( team=self.team, @@ -399,7 +461,6 @@ def test_trends_groups_per_day_cumulative(self): def test_trends_breakdown_cumulative(self): self._create_events() with freeze_time("2020-01-04T13:00:01Z"): - response = Trends().run( Filter( team=self.team, @@ -429,7 +490,11 @@ def test_trends_single_aggregate_dau(self): daily_response = Trends().run( Filter( team=self.team, - data={"display": TRENDS_TABLE, "interval": "week", "events": [{"id": "sign up", "math": "dau"}]}, + data={ + "display": TRENDS_TABLE, + "interval": "week", + "events": [{"id": "sign up", "math": "dau"}], + }, ), self.team, ) @@ -438,29 +503,73 @@ def test_trends_single_aggregate_dau(self): weekly_response = Trends().run( Filter( team=self.team, - data={"display": TRENDS_TABLE, "interval": "day", "events": [{"id": "sign up", "math": "dau"}]}, + data={ + "display": TRENDS_TABLE, + "interval": "day", + "events": [{"id": "sign up", "math": "dau"}], + }, ), self.team, ) self.assertEqual(daily_response[0]["aggregated_value"], 1) - self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"]) + self.assertEqual( + daily_response[0]["aggregated_value"], + weekly_response[0]["aggregated_value"], + ) @also_test_with_materialized_columns(["$math_prop"]) def test_trends_single_aggregate_math(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) with freeze_time("2020-01-01 00:06:34"): - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1}) - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1}) - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 1}) - _create_event(team=self.team, event="sign up", distinct_id="blabla", 
properties={"$math_prop": 2}) - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 3}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 1}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 1}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 1}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 2}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 3}, + ) with freeze_time("2020-01-02 00:06:34"): - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 4}) - _create_event(team=self.team, event="sign up", distinct_id="blabla", properties={"$math_prop": 4}) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 4}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$math_prop": 4}, + ) with freeze_time("2020-01-04T13:00:01Z"): daily_response = Trends().run( @@ -469,7 +578,13 @@ def test_trends_single_aggregate_math(self): data={ "display": TRENDS_TABLE, "interval": "week", - "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$math_prop", + } + ], }, ), self.team, @@ -482,21 +597,36 @@ def test_trends_single_aggregate_math(self): data={ "display": TRENDS_TABLE, "interval": "day", - "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$math_prop", + } + ], }, ), self.team, ) self.assertEqual(daily_response[0]["aggregated_value"], 2.0) - self.assertEqual(daily_response[0]["aggregated_value"], 
weekly_response[0]["aggregated_value"]) + self.assertEqual( + daily_response[0]["aggregated_value"], + weekly_response[0]["aggregated_value"], + ) @snapshot_clickhouse_queries def test_trends_with_session_property_single_aggregate_math(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -568,7 +698,13 @@ def test_trends_with_session_property_single_aggregate_math(self): data={ "display": TRENDS_TABLE, "interval": "week", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -581,20 +717,35 @@ def test_trends_with_session_property_single_aggregate_math(self): data={ "display": TRENDS_TABLE, "interval": "day", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, ) self.assertEqual(daily_response[0]["aggregated_value"], 7.5) - self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"]) + self.assertEqual( + daily_response[0]["aggregated_value"], + weekly_response[0]["aggregated_value"], + ) def test_unique_session_with_session_breakdown(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + 
team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -779,7 +930,9 @@ def test_trends_breakdown_single_aggregate_cohorts(self): def test_trends_breakdown_single_aggregate(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) with freeze_time("2020-01-01 00:06:34"): _create_event( @@ -831,7 +984,11 @@ def test_trends_breakdown_single_aggregate(self): daily_response = Trends().run( Filter( team=self.team, - data={"display": TRENDS_TABLE, "breakdown": "$browser", "events": [{"id": "sign up"}]}, + data={ + "display": TRENDS_TABLE, + "breakdown": "$browser", + "events": [{"id": "sign up"}], + }, ), self.team, ) @@ -848,7 +1005,9 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self): return True _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) with freeze_time("2020-01-01 00:06:34"): _create_event( @@ -935,7 +1094,11 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self): daily_response = Trends().run( Filter( team=self.team, - data={"display": TRENDS_TABLE, "breakdown": "$browser", "events": [{"id": "sign up"}]}, + data={ + "display": TRENDS_TABLE, + "breakdown": "$browser", + "events": [{"id": "sign up"}], + }, ), self.team, ) @@ -948,7 +1111,9 @@ def test_trends_breakdown_single_aggregate_with_zero_person_ids(self): def test_trends_breakdown_single_aggregate_math(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + 
team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) with freeze_time("2020-01-01 00:06:34"): _create_event( @@ -1004,7 +1169,13 @@ def test_trends_breakdown_single_aggregate_math(self): "display": TRENDS_TABLE, "interval": "day", "breakdown": "$some_property", - "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$math_prop", + } + ], }, ), self.team, @@ -1018,21 +1189,36 @@ def test_trends_breakdown_single_aggregate_math(self): "display": TRENDS_TABLE, "interval": "week", "breakdown": "$some_property", - "events": [{"id": "sign up", "math": "median", "math_property": "$math_prop"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$math_prop", + } + ], }, ), self.team, ) self.assertEqual(daily_response[0]["aggregated_value"], 2.0) - self.assertEqual(daily_response[0]["aggregated_value"], weekly_response[0]["aggregated_value"]) + self.assertEqual( + daily_response[0]["aggregated_value"], + weekly_response[0]["aggregated_value"], + ) @snapshot_clickhouse_queries def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakdown(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -1119,7 +1305,13 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd "display": TRENDS_TABLE, "interval": "week", "breakdown": "$some_property", - "events": [{"id": "sign up", "math": "median", "math_property": 
"$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -1128,7 +1320,10 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd # value1 has: 5 seconds, 10 seconds, 15 seconds # value2 has: 10 seconds, 15 seconds (aggregated by session, so 15 is not double counted) # empty has: 1 seconds - self.assertEqual([resp["breakdown_value"] for resp in daily_response], ["value2", "value1", ""]) + self.assertEqual( + [resp["breakdown_value"] for resp in daily_response], + ["value2", "value1", ""], + ) self.assertEqual([resp["aggregated_value"] for resp in daily_response], [12.5, 10, 1]) with freeze_time("2020-01-04T13:00:01Z"): @@ -1139,7 +1334,13 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd "display": TRENDS_TABLE, "interval": "day", "breakdown": "$some_property", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -1157,9 +1358,15 @@ def test_trends_breakdown_with_session_property_single_aggregate_math_and_breakd @snapshot_clickhouse_queries def test_trends_person_breakdown_with_session_property_single_aggregate_math_and_breakdown(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "another_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "another_val"}) _create_event( team=self.team, @@ -1247,7 +1454,13 @@ def test_trends_person_breakdown_with_session_property_single_aggregate_math_and "interval": "week", "breakdown": 
"$some_prop", "breakdown_type": "person", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -1255,7 +1468,10 @@ def test_trends_person_breakdown_with_session_property_single_aggregate_math_and # another_val has: 10 seconds # some_val has: 1, 5 seconds, 15 seconds - self.assertEqual([resp["breakdown_value"] for resp in daily_response], ["another_val", "some_val"]) + self.assertEqual( + [resp["breakdown_value"] for resp in daily_response], + ["another_val", "some_val"], + ) self.assertEqual([resp["aggregated_value"] for resp in daily_response], [10.0, 5.0]) @snapshot_clickhouse_queries @@ -1289,7 +1505,6 @@ def test_trends_any_event_total_count(self): @also_test_with_materialized_columns(["$math_prop", "$some_property"]) def test_trends_breakdown_with_math_func(self): - with freeze_time("2020-01-01 00:06:34"): for i in range(20): _create_person(team_id=self.team.pk, distinct_ids=[f"person{i}"]) @@ -1322,7 +1537,13 @@ def test_trends_breakdown_with_math_func(self): "display": TRENDS_TABLE, "interval": "day", "breakdown": "$some_property", - "events": [{"id": "sign up", "math": "p90", "math_property": "$math_prop"}], + "events": [ + { + "id": "sign up", + "math": "p90", + "math_property": "$math_prop", + } + ], }, ), self.team, @@ -1336,7 +1557,14 @@ def test_trends_compare_day_interval_relative_range(self): self._create_events() with freeze_time("2020-01-04T13:00:01Z"): response = Trends().run( - Filter(team=self.team, data={"compare": "true", "date_from": "-7d", "events": [{"id": "sign up"}]}), + Filter( + team=self.team, + data={ + "compare": "true", + "date_from": "-7d", + "events": [{"id": "sign up"}], + }, + ), self.team, ) @@ -1380,7 +1608,11 @@ def test_trends_compare_day_interval_relative_range(self): with freeze_time("2020-01-04T13:00:01Z"): no_compare_response = Trends().run( - 
Filter(team=self.team, data={"compare": "false", "events": [{"id": "sign up"}]}), self.team + Filter( + team=self.team, + data={"compare": "false", "events": [{"id": "sign up"}]}, + ), + self.team, ) self.assertEqual(no_compare_response[0]["label"], "sign up") @@ -1555,17 +1787,28 @@ def _test_events_with_dates(self, dates: List[str], result, query_time=None, **f for time in dates: with freeze_time(time): _create_event( - event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"} + event="event_name", + team=self.team, + distinct_id="person_1", + properties={"$browser": "Safari"}, ) if query_time: with freeze_time(query_time): response = Trends().run( - Filter(team=self.team, data={**filter_params, "events": [{"id": "event_name"}]}), self.team + Filter( + team=self.team, + data={**filter_params, "events": [{"id": "event_name"}]}, + ), + self.team, ) else: response = Trends().run( - Filter(team=self.team, data={**filter_params, "events": [{"id": "event_name"}]}), self.team + Filter( + team=self.team, + data={**filter_params, "events": [{"id": "event_name"}]}, + ), + self.team, ) self.assertEqual(result[0]["count"], response[0]["count"]) @@ -1720,8 +1963,20 @@ def test_week_interval(self): "label": "event_name", "count": 4.0, "data": [0.0, 1.0, 2.0, 1.0, 0.0], - "labels": ["25-Oct-2020", "1-Nov-2020", "8-Nov-2020", "15-Nov-2020", "22-Nov-2020"], - "days": ["2020-10-25", "2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22"], + "labels": [ + "25-Oct-2020", + "1-Nov-2020", + "8-Nov-2020", + "15-Nov-2020", + "22-Nov-2020", + ], + "days": [ + "2020-10-25", + "2020-11-01", + "2020-11-08", + "2020-11-15", + "2020-11-22", + ], } ], ) @@ -1749,8 +2004,22 @@ def test_month_interval(self): "label": "event_name", "count": 3.0, "data": [0.0, 2.0, 0.0, 0.0, 1.0, 0.0], - "labels": ["1-Jun-2020", "1-Jul-2020", "1-Aug-2020", "1-Sep-2020", "1-Oct-2020", "1-Nov-2020"], - "days": ["2020-06-01", "2020-07-01", "2020-08-01", "2020-09-01", "2020-10-01", 
"2020-11-01"], + "labels": [ + "1-Jun-2020", + "1-Jul-2020", + "1-Aug-2020", + "1-Sep-2020", + "1-Oct-2020", + "1-Nov-2020", + ], + "days": [ + "2020-06-01", + "2020-07-01", + "2020-08-01", + "2020-09-01", + "2020-10-01", + "2020-11-01", + ], } ], ) @@ -1778,7 +2047,12 @@ def test_interval_rounding(self): "label": "event_name", "count": 4.0, "data": [1.0, 2.0, 1.0, 0.0], - "labels": ["1-Nov-2020", "8-Nov-2020", "15-Nov-2020", "22-Nov-2020"], + "labels": [ + "1-Nov-2020", + "8-Nov-2020", + "15-Nov-2020", + "22-Nov-2020", + ], "days": ["2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22"], } ], @@ -1872,7 +2146,12 @@ def test_yesterday_timerange(self): def test_last24hours_timerange(self): self._test_events_with_dates( - dates=["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00", "2020-11-02 08:25:00"], + dates=[ + "2020-11-01 05:20:00", + "2020-11-01 10:22:00", + "2020-11-01 10:25:00", + "2020-11-02 08:25:00", + ], date_from="-24h", query_time="2020-11-02 10:20:00", result=[ @@ -1900,9 +2179,14 @@ def test_last24hours_timerange(self): def test_last48hours_timerange(self): self._test_events_with_dates( - dates=["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00", "2020-11-02 08:25:00"], - date_from="-48h", - query_time="2020-11-02 10:20:00", + dates=[ + "2020-11-01 05:20:00", + "2020-11-01 10:22:00", + "2020-11-01 10:25:00", + "2020-11-02 08:25:00", + ], + date_from="-48h", + query_time="2020-11-02 10:20:00", result=[ { "action": { @@ -1928,7 +2212,12 @@ def test_last48hours_timerange(self): def test_last7days_timerange(self): self._test_events_with_dates( - dates=["2020-11-01 05:20:00", "2020-11-02 10:22:00", "2020-11-04 10:25:00", "2020-11-05 08:25:00"], + dates=[ + "2020-11-01 05:20:00", + "2020-11-02 10:22:00", + "2020-11-04 10:25:00", + "2020-11-05 08:25:00", + ], date_from="-7d", query_time="2020-11-07 10:20:00", result=[ @@ -2000,7 +2289,23 @@ def test_last14days_timerange(self): }, "label": "event_name", "count": 6.0, - 
"data": [0.0, 1.0, 1.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + "data": [ + 0.0, + 1.0, + 1.0, + 0.0, + 1.0, + 2.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + ], "labels": [ "31-Oct-2020", "1-Nov-2020", @@ -2077,7 +2382,14 @@ def test_last30days_timerange(self): "22-Nov-2020", "29-Nov-2020", ], - "days": ["2020-10-25", "2020-11-01", "2020-11-08", "2020-11-15", "2020-11-22", "2020-11-29"], + "days": [ + "2020-10-25", + "2020-11-01", + "2020-11-08", + "2020-11-15", + "2020-11-22", + "2020-11-29", + ], } ], ) @@ -2293,8 +2605,22 @@ def test_custom_range_timerange(self): "label": "event_name", "count": 3.0, "data": [2.0, 0.0, 0.0, 0.0, 1.0, 0.0], - "labels": ["5-Jan-2020", "6-Jan-2020", "7-Jan-2020", "8-Jan-2020", "9-Jan-2020", "10-Jan-2020"], - "days": ["2020-01-05", "2020-01-06", "2020-01-07", "2020-01-08", "2020-01-09", "2020-01-10"], + "labels": [ + "5-Jan-2020", + "6-Jan-2020", + "7-Jan-2020", + "8-Jan-2020", + "9-Jan-2020", + "10-Jan-2020", + ], + "days": [ + "2020-01-05", + "2020-01-06", + "2020-01-07", + "2020-01-08", + "2020-01-09", + "2020-01-10", + ], } ], ) @@ -2363,9 +2689,15 @@ def test_trends_with_hogql_math(self): @snapshot_clickhouse_queries def test_trends_with_session_property_total_volume_math(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -2452,7 +2784,13 @@ def test_trends_with_session_property_total_volume_math(self): team=self.team, data={ "interval": "week", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + 
"id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -2464,7 +2802,13 @@ def test_trends_with_session_property_total_volume_math(self): team=self.team, data={ "interval": "day", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -2491,9 +2835,15 @@ def test_trends_with_session_property_total_volume_math(self): @snapshot_clickhouse_queries def test_trends_with_session_property_total_volume_math_with_breakdowns(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -2581,7 +2931,13 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self): data={ "breakdown": "$some_property", "interval": "week", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -2594,7 +2950,13 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self): data={ "breakdown": "$some_property", "interval": "day", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -2626,9 +2988,15 @@ def test_trends_with_session_property_total_volume_math_with_breakdowns(self): def 
test_trends_with_session_property_total_volume_math_with_sessions_spanning_multiple_intervals(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["blabla2"], + properties={"$some_prop": "some_val"}, ) - _create_person(team_id=self.team.pk, distinct_ids=["blabla2"], properties={"$some_prop": "some_val"}) _create_event( team=self.team, @@ -2674,7 +3042,13 @@ def test_trends_with_session_property_total_volume_math_with_sessions_spanning_m team=self.team, data={ "interval": "day", - "events": [{"id": "sign up", "math": "median", "math_property": "$session_duration"}], + "events": [ + { + "id": "sign up", + "math": "median", + "math_property": "$session_duration", + } + ], }, ), self.team, @@ -2717,9 +3091,24 @@ def test_filter_events_by_cohort(self): _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"}) _create_person(team_id=self.team.pk, distinct_ids=["person_2"], properties={"name": "Jane"}) - _create_event(event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"}) - _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Chrome"}) - _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Safari"}) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_1", + properties={"$browser": "Safari"}, + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_2", + properties={"$browser": "Chrome"}, + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_2", + properties={"$browser": "Safari"}, + ) cohort = _create_cohort( team=self.team, @@ -2745,12 +3134,35 @@ def test_filter_events_by_cohort(self): 
@snapshot_clickhouse_queries def test_filter_events_by_precalculated_cohort(self): with freeze_time("2020-01-02"): - _create_person(team_id=self.team.pk, distinct_ids=["person_1"], properties={"name": "John"}) - _create_person(team_id=self.team.pk, distinct_ids=["person_2"], properties={"name": "Jane"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person_1"], + properties={"name": "John"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person_2"], + properties={"name": "Jane"}, + ) - _create_event(event="event_name", team=self.team, distinct_id="person_1", properties={"$browser": "Safari"}) - _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Chrome"}) - _create_event(event="event_name", team=self.team, distinct_id="person_2", properties={"$browser": "Safari"}) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_1", + properties={"$browser": "Safari"}, + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_2", + properties={"$browser": "Chrome"}, + ) + _create_event( + event="event_name", + team=self.team, + distinct_id="person_2", + properties={"$browser": "Safari"}, + ) cohort = _create_cohort( team=self.team, @@ -2785,7 +3197,13 @@ def test_interval_filtering_hour(self): with freeze_time("2020-01-02"): response = Trends().run( - Filter(data={"date_from": "2019-12-24", "interval": "hour", "events": [{"id": "sign up"}]}), + Filter( + data={ + "date_from": "2019-12-24", + "interval": "hour", + "events": [{"id": "sign up"}], + } + ), self.team, ) self.assertEqual(response[0]["labels"][3], "24-Dec-2019 03:00") @@ -2810,7 +3228,8 @@ def test_interval_filtering_week(self): self.team, ) self.assertEqual( - response[0]["labels"][:5], ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"] + response[0]["labels"][:5], + ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"], ) 
self.assertEqual(response[0]["data"][:5], [0.0, 0.0, 0.0, 0.0, 1.0]) @@ -2820,7 +3239,12 @@ def test_interval_filtering_month(self): with freeze_time("2020-01-02"): response = Trends().run( Filter( - team=self.team, data={"date_from": "2019-9-24", "interval": "month", "events": [{"id": "sign up"}]} + team=self.team, + data={ + "date_from": "2019-9-24", + "interval": "month", + "events": [{"id": "sign up"}], + }, ), self.team, ) @@ -2839,7 +3263,14 @@ def test_interval_filtering_today_hourly(self): with freeze_time("2020-01-02T23:31:00Z"): response = Trends().run( - Filter(team=self.team, data={"date_from": "dStart", "interval": "hour", "events": [{"id": "sign up"}]}), + Filter( + team=self.team, + data={ + "date_from": "dStart", + "interval": "hour", + "events": [{"id": "sign up"}], + }, + ), self.team, ) self.assertEqual(response[0]["labels"][23], "2-Jan-2020 23:00") @@ -2860,25 +3291,56 @@ def test_breakdown_label(self): self.assertEqual(none_label, {"label": "$pageview - Other", "breakdown_value": "Other"}) cohort_all_label = breakdown_label(entity, "cohort_all") - self.assertEqual(cohort_all_label, {"label": "$pageview - all users", "breakdown_value": "all"}) + self.assertEqual( + cohort_all_label, + {"label": "$pageview - all users", "breakdown_value": "all"}, + ) cohort = _create_cohort(team=self.team, name="cohort1", groups=[{"properties": {"name": "Jane"}}]) cohort_label = breakdown_label(entity, f"cohort_{cohort.pk}") - self.assertEqual(cohort_label, {"label": f"$pageview - {cohort.name}", "breakdown_value": cohort.pk}) + self.assertEqual( + cohort_label, + {"label": f"$pageview - {cohort.name}", "breakdown_value": cohort.pk}, + ) @also_test_with_materialized_columns(["key"]) def test_breakdown_with_filter(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"}) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"}) - _create_event(event="sign up", 
distinct_id="person1", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "oh"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"email": "test@posthog.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "test@gmail.com"}, + ) + _create_event( + event="sign up", + distinct_id="person1", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person2", + team=self.team, + properties={"key": "oh"}, + ) response = Trends().run( Filter( team=self.team, data={ "date_from": "-14d", "breakdown": "key", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], "properties": [{"key": "key", "value": "oh", "operator": "not_icontains"}], }, ), @@ -2889,7 +3351,10 @@ def test_breakdown_with_filter(self): def test_action_filtering(self): sign_up_action, person = self._create_events() - action_response = Trends().run(Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}]}), self.team) + action_response = Trends().run( + Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}]}), + self.team, + ) event_response = Trends().run(Filter(team=self.team, data={"events": [{"id": "sign up"}]}), self.team) self.assertEqual(len(action_response), 1) @@ -2931,7 +3396,9 @@ def test_action_filtering_with_cohort(self): ) sign_up_action = _create_action( - team=self.team, name="sign up", properties=[{"key": "id", "type": "cohort", "value": cohort.id}] + team=self.team, + name="sign up", + properties=[{"key": "id", "type": "cohort", "value": cohort.id}], ) cohort.calculate_people_ch(pending_version=2) @@ -2963,9 +3430,21 @@ def test_trends_for_non_existing_action(self): @also_test_with_materialized_columns(person_properties=["email", 
"bar"]) def test_trends_regression_filtering_by_action_with_person_properties(self): - _create_person(team_id=self.team.pk, properties={"email": "foo@example.com", "bar": "aa"}, distinct_ids=["d1"]) - _create_person(team_id=self.team.pk, properties={"email": "bar@example.com", "bar": "bb"}, distinct_ids=["d2"]) - _create_person(team_id=self.team.pk, properties={"email": "efg@example.com", "bar": "ab"}, distinct_ids=["d3"]) + _create_person( + team_id=self.team.pk, + properties={"email": "foo@example.com", "bar": "aa"}, + distinct_ids=["d1"], + ) + _create_person( + team_id=self.team.pk, + properties={"email": "bar@example.com", "bar": "bb"}, + distinct_ids=["d2"], + ) + _create_person( + team_id=self.team.pk, + properties={"email": "efg@example.com", "bar": "ab"}, + distinct_ids=["d3"], + ) _create_person(team_id=self.team.pk, properties={"bar": "aa"}, distinct_ids=["d4"]) with freeze_time("2020-01-02 16:34:34"): @@ -2983,7 +3462,11 @@ def test_trends_regression_filtering_by_action_with_person_properties(self): with freeze_time("2020-01-04T13:01:01Z"): response = Trends().run( - Filter(team=self.team, data={"actions": [{"id": event_filtering_action.id}]}), self.team + Filter( + team=self.team, + data={"actions": [{"id": event_filtering_action.id}]}, + ), + self.team, ) self.assertEqual(len(response), 1) self.assertEqual(response[0]["count"], 3) @@ -2994,7 +3477,14 @@ def test_trends_regression_filtering_by_action_with_person_properties(self): team=self.team, data={ "actions": [{"id": event_filtering_action.id}], - "properties": [{"key": "email", "type": "person", "value": "is_set", "operator": "is_set"}], + "properties": [ + { + "key": "email", + "type": "person", + "value": "is_set", + "operator": "is_set", + } + ], }, ), self.team, @@ -3011,7 +3501,11 @@ def test_dau_filtering(self): with freeze_time("2020-01-04"): action_response = Trends().run( - Filter(team=self.team, data={"actions": [{"id": sign_up_action.id, "math": "dau"}]}), self.team + Filter( + 
team=self.team, + data={"actions": [{"id": sign_up_action.id, "math": "dau"}]}, + ), + self.team, ) response = Trends().run(Filter(data={"events": [{"id": "sign up", "math": "dau"}]}), self.team) @@ -3024,9 +3518,17 @@ def _create_maths_events(self, values): _create_person(team_id=self.team.pk, distinct_ids=["someone_else"]) for value in values: _create_event( - team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": value} + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": value}, ) - _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": None}) + _create_event( + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": None}, + ) return sign_up_action def _test_math_property_aggregation(self, math_property, values, expected_value): @@ -3035,12 +3537,30 @@ def _test_math_property_aggregation(self, math_property, values, expected_value) action_response = Trends().run( Filter( team=self.team, - data={"actions": [{"id": sign_up_action.id, "math": math_property, "math_property": "some_number"}]}, + data={ + "actions": [ + { + "id": sign_up_action.id, + "math": math_property, + "math_property": "some_number", + } + ] + }, ), self.team, ) event_response = Trends().run( - Filter(data={"events": [{"id": "sign up", "math": math_property, "math_property": "some_number"}]}), + Filter( + data={ + "events": [ + { + "id": "sign up", + "math": math_property, + "math_property": "some_number", + } + ] + } + ), self.team, ) # :TRICKY: Work around clickhouse functions not being 100% @@ -3083,16 +3603,47 @@ def test_p99_filtering(self): def test_avg_filtering_non_number_resiliency(self): sign_up_action, person = self._create_events() _create_person(team_id=self.team.pk, distinct_ids=["someone_else"]) - _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": 2}) - 
_create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": "x"}) - _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": None}) - _create_event(team=self.team, event="sign up", distinct_id="someone_else", properties={"some_number": 8}) + _create_event( + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": 2}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": "x"}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": None}, + ) + _create_event( + team=self.team, + event="sign up", + distinct_id="someone_else", + properties={"some_number": 8}, + ) action_response = Trends().run( - Filter(data={"actions": [{"id": sign_up_action.id, "math": "avg", "math_property": "some_number"}]}), + Filter( + data={ + "actions": [ + { + "id": sign_up_action.id, + "math": "avg", + "math_property": "some_number", + } + ] + } + ), self.team, ) event_response = Trends().run( - Filter(data={"events": [{"id": "sign up", "math": "avg", "math_property": "some_number"}]}), self.team + Filter(data={"events": [{"id": "sign up", "math": "avg", "math_property": "some_number"}]}), + self.team, ) self.assertEqual(action_response[0]["data"][-1], 5) self.assertEntityResponseEqual(action_response, event_response) @@ -3107,8 +3658,14 @@ def test_per_entity_filtering(self): data={ "date_from": "-7d", "events": [ - {"id": "sign up", "properties": [{"key": "$some_property", "value": "value"}]}, - {"id": "sign up", "properties": [{"key": "$some_property", "value": "other_value"}]}, + { + "id": "sign up", + "properties": [{"key": "$some_property", "value": "value"}], + }, + { + "id": "sign up", + "properties": [{"key": "$some_property", "value": "other_value"}], + }, ], }, ), @@ -3123,10 +3680,26 @@ def test_per_entity_filtering(self): 
self.assertEqual(response[1]["count"], 1) def _create_multiple_people(self): - person1 = _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"name": "person1"}) - person2 = _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"name": "person2"}) - person3 = _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"name": "person3"}) - person4 = _create_person(team_id=self.team.pk, distinct_ids=["person4"], properties={"name": "person4"}) + person1 = _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"name": "person1"}, + ) + person2 = _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"name": "person2"}, + ) + person3 = _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"name": "person3"}, + ) + person4 = _create_person( + team_id=self.team.pk, + distinct_ids=["person4"], + properties={"name": "person4"}, + ) journey = { "person1": [ @@ -3255,7 +3828,13 @@ def test_entity_person_property_filtering(self): "events": [ { "id": "watched movie", - "properties": [{"key": "name", "value": "person1", "type": "person"}], + "properties": [ + { + "key": "name", + "value": "person1", + "type": "person", + } + ], } ] }, @@ -3269,7 +3848,12 @@ def test_entity_person_property_filtering(self): def test_breakdown_by_empty_cohort(self): _create_person(team_id=self.team.pk, distinct_ids=["p1"], properties={"name": "p1"}) - _create_event(team=self.team, event="$pageview", distinct_id="p1", timestamp="2020-01-04T12:00:00Z") + _create_event( + team=self.team, + event="$pageview", + distinct_id="p1", + timestamp="2020-01-04T12:00:00Z", + ) with freeze_time("2020-01-04T13:01:01Z"): event_response = Trends().run( @@ -3332,7 +3916,14 @@ def test_breakdown_by_cohort(self): "date_from": "-14d", "breakdown": json.dumps([cohort.pk, cohort2.pk, cohort3.pk, "all"]), "breakdown_type": "cohort", - "events": [{"id": "watched movie", "name": 
"watched movie", "type": "events", "order": 0}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + } + ], }, ), self.team, @@ -3403,7 +3994,8 @@ def test_interval_filtering_breakdown(self): ) self.assertEqual( - response[0]["labels"][:5], ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"] + response[0]["labels"][:5], + ["24-Nov-2019", "1-Dec-2019", "8-Dec-2019", "15-Dec-2019", "22-Dec-2019"], ) self.assertEqual(response[0]["data"][:5], [0.0, 0.0, 0.0, 0.0, 1.0]) @@ -3472,14 +4064,22 @@ def test_breakdown_by_person_property(self): "date_from": "-14d", "breakdown": "name", "breakdown_type": "person", - "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + } + ], }, ), self.team, ) self.assertListEqual( - sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"] + sorted(res["breakdown_value"] for res in event_response), + ["person1", "person2", "person3"], ) for response in event_response: @@ -3505,14 +4105,22 @@ def test_breakdown_by_person_property_for_person_on_events(self): "date_from": "-14d", "breakdown": "name", "breakdown_type": "person", - "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + } + ], }, ), self.team, ) self.assertListEqual( - sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"] + sorted(res["breakdown_value"] for res in event_response), + ["person1", "person2", "person3"], ) for response in event_response: @@ -3564,14 +4172,22 @@ def test_breakdown_by_person_property_for_person_on_events_with_zero_person_ids( "date_from": "-14d", "breakdown": "name", "breakdown_type": "person", - "events": [{"id": "watched 
movie", "name": "watched movie", "type": "events", "order": 0}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + } + ], }, ), self.team, ) self.assertListEqual( - sorted(res["breakdown_value"] for res in event_response), ["person1", "person2", "person3"] + sorted(res["breakdown_value"] for res in event_response), + ["person1", "person2", "person3"], ) for response in event_response: @@ -3643,7 +4259,13 @@ def test_breakdown_by_property_pie(self): "breakdown_type": "event", "display": "ActionsPie", "events": [ - {"id": "watched movie", "name": "watched movie", "type": "events", "order": 0, "math": "dau"} + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + "math": "dau", + } ], } event_response = Trends().run(Filter(team=self.team, data=data), self.team) @@ -3652,7 +4274,8 @@ def test_breakdown_by_property_pie(self): entity = Entity({"id": "watched movie", "type": "events", "math": "dau"}) people_value_1 = self._get_trend_people( - Filter(team=self.team, data={**data, "breakdown_value": "value_1"}), entity + Filter(team=self.team, data={**data, "breakdown_value": "value_1"}), + entity, ) assert people_value_1 == [ # Persons with higher value come first @@ -3695,7 +4318,8 @@ def test_breakdown_by_property_pie(self): ] people_value_2 = self._get_trend_people( - Filter(team=self.team, data={**data, "breakdown_value": "value_2"}), entity + Filter(team=self.team, data={**data, "breakdown_value": "value_2"}), + entity, ) assert people_value_2 == [ { @@ -3763,7 +4387,12 @@ def test_breakdown_by_person_property_pie_with_event_dau_filter(self): "order": 0, "math": "dau", "properties": [ - {"key": "name", "operator": "not_icontains", "value": "person3", "type": "person"} + { + "key": "name", + "operator": "not_icontains", + "value": "person3", + "type": "person", + } ], } ], @@ -3864,7 +4493,11 @@ def test_filter_test_accounts_cohorts(self): self.team.save() response = 
Trends().run( - Filter(data={"events": [{"id": "event_name"}], "filter_test_accounts": True}, team=self.team), self.team + Filter( + data={"events": [{"id": "event_name"}], "filter_test_accounts": True}, + team=self.team, + ), + self.team, ) self.assertEqual(response[0]["count"], 2) @@ -3969,7 +4602,9 @@ def test_trends_aggregate_by_distinct_id(self): # Stopgap until https://github.com/PostHog/meta/pull/39 is implemented _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) _create_person(team_id=self.team.pk, distinct_ids=["third"]) @@ -3984,7 +4619,13 @@ def test_trends_aggregate_by_distinct_id(self): with override_instance_config("AGGREGATE_BY_DISTINCT_IDS_TEAMS", f"{self.team.pk},4"): with freeze_time("2019-12-31T13:00:01Z"): daily_response = Trends().run( - Filter(team=self.team, data={"interval": "day", "events": [{"id": "sign up", "math": "dau"}]}), + Filter( + team=self.team, + data={ + "interval": "day", + "events": [{"id": "sign up", "math": "dau"}], + }, + ), self.team, ) @@ -3997,7 +4638,13 @@ def test_trends_aggregate_by_distinct_id(self): data={ "interval": "day", "events": [{"id": "sign up", "math": "dau"}], - "properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}], + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ], }, ), self.team, @@ -4028,7 +4675,10 @@ def test_trends_aggregate_by_distinct_id(self): monthly_response = Trends().run( Filter( team=self.team, - data={"interval": "day", "events": [{"id": "sign up", "math": "monthly_active"}]}, + data={ + "interval": "day", + "events": [{"id": "sign up", "math": "monthly_active"}], + }, ), self.team, ) @@ -4037,7 +4687,11 @@ def test_trends_aggregate_by_distinct_id(self): with freeze_time("2019-12-31T13:00:01Z"): weekly_response = Trends().run( Filter( - 
team=self.team, data={"interval": "day", "events": [{"id": "sign up", "math": "weekly_active"}]} + team=self.team, + data={ + "interval": "day", + "events": [{"id": "sign up", "math": "weekly_active"}], + }, ), self.team, ) @@ -4067,7 +4721,14 @@ def test_breakdown_filtering_limit(self): data={ "date_from": "-14d", "breakdown": "$some_property", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], }, ), self.team, @@ -4104,7 +4765,13 @@ def test_breakdown_with_person_property_filter(self): "name": "watched movie", "type": "events", "order": 0, - "properties": [{"key": "name", "value": "person2", "type": "person"}], + "properties": [ + { + "key": "name", + "value": "person2", + "type": "person", + } + ], } ], }, @@ -4128,7 +4795,12 @@ def test_breakdown_filtering(self): "date_from": "-14d", "breakdown": "$some_property", "events": [ - {"id": "sign up", "name": "sign up", "type": "events", "order": 0}, + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + }, {"id": "no events"}, ], }, @@ -4148,13 +4820,36 @@ def test_breakdown_filtering(self): @also_test_with_materialized_columns(person_properties=["email"]) def test_breakdown_filtering_persons(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"}) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"email": "test@posthog.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "test@gmail.com"}, + ) _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={}) - _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", 
distinct_id="person2", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", distinct_id="person3", team=self.team, properties={"key": "val"}) + _create_event( + event="sign up", + distinct_id="person1", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person2", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person3", + team=self.team, + properties={"key": "val"}, + ) response = Trends().run( Filter( team=self.team, @@ -4162,7 +4857,14 @@ def test_breakdown_filtering_persons(self): "date_from": "-14d", "breakdown": "email", "breakdown_type": "person", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], }, ), self.team, @@ -4178,13 +4880,36 @@ def test_breakdown_filtering_persons(self): # ensure that column names are properly handled when subqueries and person subquery share properties column @also_test_with_materialized_columns(event_properties=["key"], person_properties=["email"]) def test_breakdown_filtering_persons_with_action_props(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"}) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], properties={"email": "test@gmail.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"email": "test@posthog.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "test@gmail.com"}, + ) _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={}) - _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", distinct_id="person3", 
team=self.team, properties={"key": "val"}) + _create_event( + event="sign up", + distinct_id="person1", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person2", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person3", + team=self.team, + properties={"key": "val"}, + ) action = _create_action( name="sign up", team=self.team, @@ -4217,26 +4942,42 @@ def test_breakdown_filtering_with_properties(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Chrome", "$os": "Windows"}, + properties={ + "$current_url": "first url", + "$browser": "Chrome", + "$os": "Windows", + }, ) with freeze_time("2020-01-04T13:01:01Z"): _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Chrome", "$os": "Windows"}, + properties={ + "$current_url": "second url", + "$browser": "Chrome", + "$os": "Windows", + }, ) with freeze_time("2020-01-05T13:01:01Z"): @@ -4278,26 +5019,42 @@ def test_breakdown_filtering_with_properties_in_new_format(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Windows"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Windows", + }, ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - 
properties={"$current_url": "first url", "$browser": "Chrome", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Chrome", + "$os": "Mac", + }, ) with freeze_time("2020-01-04T13:01:01Z"): _create_event( team=self.team, event="sign up", distinct_id="blabla1", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) _create_event( team=self.team, event="sign up", distinct_id="blabla2", - properties={"$current_url": "second url", "$browser": "Chrome", "$os": "Windows"}, + properties={ + "$current_url": "second url", + "$browser": "Chrome", + "$os": "Windows", + }, ) with freeze_time("2020-01-05T13:01:01Z"): @@ -4318,7 +5075,10 @@ def test_breakdown_filtering_with_properties_in_new_format(self): ], "properties": { "type": "OR", - "values": [{"key": "$browser", "value": "Firefox"}, {"key": "$os", "value": "Windows"}], + "values": [ + {"key": "$browser", "value": "Firefox"}, + {"key": "$os", "value": "Windows"}, + ], }, }, ), @@ -4350,7 +5110,10 @@ def test_breakdown_filtering_with_properties_in_new_format(self): ], "properties": { "type": "AND", - "values": [{"key": "$browser", "value": "Firefox"}, {"key": "$os", "value": "Windows"}], + "values": [ + {"key": "$browser", "value": "Firefox"}, + {"key": "$os", "value": "Windows"}, + ], }, }, ), @@ -4394,7 +5157,13 @@ def test_mau_with_breakdown_filtering_and_prop_filter(self): "breakdown": "$some_prop", "breakdown_type": "person", "events": [{"id": "sign up", "math": "monthly_active"}], - "properties": [{"key": "filter_prop", "value": "filter_val", "type": "person"}], + "properties": [ + { + "key": "filter_prop", + "value": "filter_val", + "type": "person", + } + ], "display": "ActionsLineGraph", }, ), @@ -4415,19 +5184,29 @@ def test_dau_with_breakdown_filtering(self): sign_up_action, _ = self._create_events() with freeze_time("2020-01-02T13:01:01Z"): _create_event( - 
team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"} + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "other_value"}, ) with freeze_time("2020-01-04T13:01:01Z"): action_response = Trends().run( Filter( team=self.team, - data={"breakdown": "$some_property", "actions": [{"id": sign_up_action.id, "math": "dau"}]}, + data={ + "breakdown": "$some_property", + "actions": [{"id": sign_up_action.id, "math": "dau"}], + }, ), self.team, ) event_response = Trends().run( Filter( - team=self.team, data={"breakdown": "$some_property", "events": [{"id": "sign up", "math": "dau"}]} + team=self.team, + data={ + "breakdown": "$some_property", + "events": [{"id": "sign up", "math": "dau"}], + }, ), self.team, ) @@ -4448,7 +5227,10 @@ def test_dau_with_breakdown_filtering_with_sampling(self): sign_up_action, _ = self._create_events() with freeze_time("2020-01-02T13:01:01Z"): _create_event( - team=self.team, event="sign up", distinct_id="blabla", properties={"$some_property": "other_value"} + team=self.team, + event="sign up", + distinct_id="blabla", + properties={"$some_property": "other_value"}, ) with freeze_time("2020-01-04T13:01:01Z"): action_response = Trends().run( @@ -4530,7 +5312,9 @@ def test_dau_with_breakdown_filtering_with_prop_filter(self): def test_against_clashing_entity_and_property_filter_naming(self): # Regression test for https://github.com/PostHog/posthog/issues/5814 _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) _create_event( team=self.team, @@ -4548,10 +5332,21 @@ def test_against_clashing_entity_and_property_filter_naming(self): "events": [ { "id": "$pageview", - "properties": [{"key": "$host", "operator": "icontains", "value": ".com"}], + "properties": [ + { + "key": "$host", + "operator": 
"icontains", + "value": ".com", + } + ], + } + ], + "properties": [ + { + "key": "$host", + "value": ["app.example.com", "another.com"], } ], - "properties": [{"key": "$host", "value": ["app.example.com", "another.com"]}], "breakdown": "$some_prop", "breakdown_type": "person", }, @@ -4565,7 +5360,9 @@ def test_against_clashing_entity_and_property_filter_naming(self): @also_test_with_materialized_columns(["$current_url"]) def test_action_with_prop(self): _create_person( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) sign_up_action = Action.objects.create(team=self.team, name="sign up") ActionStep.objects.create( @@ -4609,7 +5406,9 @@ def test_combine_all_cohort_and_icontains(self): # This caused some issues with SQL parsing sign_up_action, _ = self._create_events() cohort = Cohort.objects.create( - team=self.team, name="a", groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}] + team=self.team, + name="a", + groups=[{"properties": [{"key": "key", "value": "value", "type": "person"}]}], ) action_response = Trends().run( Filter( @@ -4642,20 +5441,48 @@ def test_person_filtering_in_cohort_in_action(self): step.save() with freeze_time("2020-01-04T13:01:01Z"): action_response = Trends().run( - Filter(team=self.team, data={"actions": [{"id": sign_up_action.id}], "breakdown": "$some_property"}), + Filter( + team=self.team, + data={ + "actions": [{"id": sign_up_action.id}], + "breakdown": "$some_property", + }, + ), self.team, ) self.assertEqual(action_response[0]["count"], 2) @also_test_with_materialized_columns(event_properties=["key"], person_properties=["email"]) def test_breakdown_user_props_with_filter(self): - _create_person(team_id=self.team.pk, distinct_ids=["person1"], properties={"email": "test@posthog.com"}) - _create_person(team_id=self.team.pk, distinct_ids=["person2"], 
properties={"email": "test@gmail.com"}) - person = _create_person(team_id=self.team.pk, distinct_ids=["person3"], properties={"email": "test@gmail.com"}) + _create_person( + team_id=self.team.pk, + distinct_ids=["person1"], + properties={"email": "test@posthog.com"}, + ) + _create_person( + team_id=self.team.pk, + distinct_ids=["person2"], + properties={"email": "test@gmail.com"}, + ) + person = _create_person( + team_id=self.team.pk, + distinct_ids=["person3"], + properties={"email": "test@gmail.com"}, + ) create_person_distinct_id(self.team.pk, "person1", str(person.uuid)) - _create_event(event="sign up", distinct_id="person1", team=self.team, properties={"key": "val"}) - _create_event(event="sign up", distinct_id="person2", team=self.team, properties={"key": "val"}) + _create_event( + event="sign up", + distinct_id="person1", + team=self.team, + properties={"key": "val"}, + ) + _create_event( + event="sign up", + distinct_id="person2", + team=self.team, + properties={"key": "val"}, + ) response = Trends().run( Filter( team=self.team, @@ -4663,9 +5490,21 @@ def test_breakdown_user_props_with_filter(self): "date_from": "-14d", "breakdown": "email", "breakdown_type": "person", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], "properties": [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}, + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + }, {"key": "key", "value": "val"}, ], }, @@ -4682,7 +5521,11 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns( _create_person( team_id=self.team.pk, distinct_ids=["person1"], - properties={"email": "test@posthog.com", "$os": "ios", "$browser": "chrome"}, + properties={ + "email": "test@posthog.com", + "$os": "ios", + "$browser": "chrome", + }, ) _create_person( 
team_id=self.team.pk, @@ -4692,41 +5535,103 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns( _create_person( team_id=self.team.pk, distinct_ids=["person3"], - properties={"email": "test2@posthog.com", "$os": "android", "$browser": "chrome"}, + properties={ + "email": "test2@posthog.com", + "$os": "android", + "$browser": "chrome", + }, ) # a second person with same properties, just so snapshot passes on different CH versions (indeterminate sorting currently) _create_person( team_id=self.team.pk, distinct_ids=["person32"], - properties={"email": "test2@posthog.com", "$os": "android", "$browser": "chrome"}, + properties={ + "email": "test2@posthog.com", + "$os": "android", + "$browser": "chrome", + }, ) _create_person( team_id=self.team.pk, distinct_ids=["person4"], - properties={"email": "test3@posthog.com", "$os": "android", "$browser": "safari"}, + properties={ + "email": "test3@posthog.com", + "$os": "android", + "$browser": "safari", + }, ) _create_person( team_id=self.team.pk, distinct_ids=["person5"], - properties={"email": "test4@posthog.com", "$os": "android", "$browser": "safari"}, + properties={ + "email": "test4@posthog.com", + "$os": "android", + "$browser": "safari", + }, ) _create_person( team_id=self.team.pk, distinct_ids=["person6"], - properties={"email": "test5@posthog.com", "$os": "android", "$browser": "safari"}, + properties={ + "email": "test5@posthog.com", + "$os": "android", + "$browser": "safari", + }, ) journeys_for( team=self.team, create_people=False, events_by_person={ - "person1": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person2": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person3": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person32": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person4": 
[{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person5": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], - "person6": [{"event": "sign up", "properties": {"key": "val"}, "timestamp": datetime(2020, 5, 1, 0)}], + "person1": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person2": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person3": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person32": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person4": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person5": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], + "person6": [ + { + "event": "sign up", + "properties": {"key": "val"}, + "timestamp": datetime(2020, 5, 1, 0), + } + ], }, ) @@ -4738,7 +5643,14 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns( "date_to": "2020-07-01 00:00:00", "breakdown": "email", "breakdown_type": "person", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], "properties": { "type": "AND", "values": [ @@ -4757,8 +5669,18 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns( { "type": "OR", "values": [ - {"key": "$os", "value": "android", "operator": "exact", "type": "person"}, - {"key": "$browser", "value": "safari", "operator": "exact", "type": "person"}, + { + "key": "$os", + "value": "android", + "operator": "exact", + "type": "person", + }, + { + "key": "$browser", + "value": 
"safari", + "operator": "exact", + "type": "person", + }, ], }, ], @@ -4811,8 +5733,18 @@ def test_trend_breakdown_user_props_with_filter_with_partial_property_pushdowns( { "type": "AND", "values": [ - {"key": "$os", "value": "android", "operator": "exact", "type": "person"}, - {"key": "$browser", "value": "chrome", "operator": "exact", "type": "person"}, + { + "key": "$os", + "value": "android", + "operator": "exact", + "type": "person", + }, + { + "key": "$browser", + "value": "chrome", + "operator": "exact", + "type": "person", + }, ], } ], @@ -4898,7 +5830,14 @@ def test_weekly_active_users_aggregated_range_wider_than_week(self): "date_from": "2020-01-01", "date_to": "2020-01-08", "display": TRENDS_TABLE, - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -4915,7 +5854,14 @@ def test_weekly_active_users_aggregated_range_wider_than_week_with_sampling(self "date_from": "2020-01-01", "date_to": "2020-01-08", "display": TRENDS_TABLE, - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -4931,7 +5877,14 @@ def test_weekly_active_users_aggregated_range_narrower_than_week(self): "date_from": "2020-01-11", "date_to": "2020-01-12", "display": TRENDS_TABLE, - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -4948,7 +5901,14 @@ def test_weekly_active_users_monthly(self): "date_from": "2019-12-01", "date_to": "2020-02-29", # T'was a leap year "interval": "month", - "events": [{"id": "$pageview", 
"type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -4965,7 +5925,14 @@ def test_weekly_active_users_daily(self): data = { "date_from": "2020-01-08", "date_to": "2020-01-19", - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -5013,7 +5980,14 @@ def test_weekly_active_users_daily_based_on_action(self): data = { "date_from": "2020-01-08", "date_to": "2020-01-19", - "actions": [{"id": action.id, "type": "actions", "order": 0, "math": "weekly_active"}], + "actions": [ + { + "id": action.id, + "type": "actions", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -5036,7 +6010,10 @@ def test_weekly_active_users_daily_based_on_action(self): ], ) # Same as test_weekly_active_users_daily - self.assertEqual(result[0]["data"], [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0]) + self.assertEqual( + result[0]["data"], + [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0], + ) @also_test_with_different_timezones @snapshot_clickhouse_queries @@ -5047,7 +6024,14 @@ def test_weekly_active_users_weekly(self): "date_from": "2019-12-29", "date_to": "2020-01-18", "interval": "week", - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -5063,7 +6047,14 @@ def test_weekly_active_users_hourly(self): "date_from": "2020-01-09T06:00:00Z", "date_to": "2020-01-09T17:00:00Z", "interval": "hour", - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + 
"events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) @@ -5091,7 +6082,10 @@ def test_weekly_active_users_hourly(self): # necessary, because there's a presentation issue: in monthly/weekly graphs data points are formatted as # D-MMM-YYYY, so if a user sees e.g. 1-Jan-2077, they'll likely expect the active users count to be for # the first day of the month, and not the last. If they saw just Jan-2077, the more general case would work. - self.assertEqual(result[0]["data"], [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0]) + self.assertEqual( + result[0]["data"], + [3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0], + ) def test_weekly_active_users_daily_based_on_action_with_zero_person_ids(self): # only a person-on-event test @@ -5121,13 +6115,23 @@ def test_weekly_active_users_daily_based_on_action_with_zero_person_ids(self): data = { "date_from": "2020-01-08", "date_to": "2020-01-19", - "actions": [{"id": action.id, "type": "actions", "order": 0, "math": "weekly_active"}], + "actions": [ + { + "id": action.id, + "type": "actions", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) result = Trends().run(filter, self.team) # Zero person IDs shouldn't be counted - self.assertEqual(result[0]["data"], [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0]) + self.assertEqual( + result[0]["data"], + [1.0, 3.0, 2.0, 2.0, 3.0, 3.0, 3.0, 3.0, 3.0, 3.0, 1.0, 0.0], + ) @also_test_with_materialized_columns(["key"]) def test_breakdown_weekly_active_users_daily(self): @@ -5174,12 +6178,22 @@ def test_breakdown_weekly_active_users_daily(self): "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", "breakdown": "key", - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": 
"weekly_active", + } + ], } filter = Filter(team=self.team, data=data) result = Trends().run(filter, self.team) - self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0]) + self.assertEqual( + result[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0], + ) @also_test_with_materialized_columns(person_properties=["name"]) @snapshot_clickhouse_queries @@ -5212,15 +6226,30 @@ def test_weekly_active_users_filtering(self): data={ "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], "properties": [ - {"key": "name", "operator": "exact", "value": ["person-1", "person-2"], "type": "person"} + { + "key": "name", + "operator": "exact", + "value": ["person-1", "person-2"], + "type": "person", + } ], }, ) result = Trends().run(filter, self.team) - self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 2.0, 2.0]) + self.assertEqual( + result[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 2.0, 2.0], + ) @snapshot_clickhouse_queries def test_breakdown_weekly_active_users_daily_based_on_action(self): @@ -5281,14 +6310,30 @@ def test_breakdown_weekly_active_users_daily_based_on_action(self): cohort = Cohort.objects.create( team=self.team, - groups=[{"properties": [{"key": "name", "operator": "exact", "value": ["p1", "p2"], "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "name", + "operator": "exact", + "value": ["p1", "p2"], + "type": "person", + } + ] + } + ], ) pageview_action = _create_action( name="$pageview", team=self.team, properties=[ - {"key": "name", "operator": "exact", "value": ["p1", "p2", "p3"], "type": "person"}, + { + "key": "name", + "operator": "exact", + "value": ["p1", "p2", "p3"], + "type": "person", + }, {"type": 
"cohort", "key": "id", "value": cohort.pk}, ], ) @@ -5297,12 +6342,22 @@ def test_breakdown_weekly_active_users_daily_based_on_action(self): "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", "breakdown": "key", - "actions": [{"id": pageview_action.id, "type": "actions", "order": 0, "math": "weekly_active"}], + "actions": [ + { + "id": pageview_action.id, + "type": "actions", + "order": 0, + "math": "weekly_active", + } + ], } filter = Filter(team=self.team, data=data) result = Trends().run(filter, self.team) - self.assertEqual(result[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0]) + self.assertEqual( + result[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 2.0, 2.0, 2.0, 0.0], + ) @also_test_with_materialized_columns(["key"]) @snapshot_clickhouse_queries @@ -5313,7 +6368,14 @@ def test_breakdown_weekly_active_users_aggregated(self): "date_from": "2020-01-11", "date_to": "2020-01-11", "display": TRENDS_TABLE, - "events": [{"id": "$pageview", "type": "events", "order": 0, "math": "weekly_active"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "math": "weekly_active", + } + ], "breakdown": "key", } @@ -5383,7 +6445,14 @@ def test_breakdown_filtering_bar_chart_by_value(self): data={ "date_from": "-7d", "breakdown": "$some_property", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], "display": TRENDS_BAR_VALUE, }, ), @@ -5555,7 +6624,12 @@ def test_filtering_with_action_props(self): ) response = Trends().run( - Filter(data={"date_from": "-14d", "actions": [{"id": action.pk, "type": "actions", "order": 0}]}), + Filter( + data={ + "date_from": "-14d", + "actions": [{"id": action.pk, "type": "actions", "order": 0}], + } + ), self.team, ) @@ -5573,7 +6647,18 @@ def test_should_throw_exception(self, patch_sync_execute): with self.assertRaises(Exception): with 
self.settings(TEST=False, DEBUG=False): Trends().run( - Filter(data={"events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}]}), + Filter( + data={ + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ] + } + ), self.team, ) @@ -5585,21 +6670,33 @@ def test_timezones_hourly_relative_from(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-04T22:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-05T07:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-05T08:01:01", ) @@ -5693,21 +6790,33 @@ def test_timezones_hourly_absolute_from(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-02T17:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T17:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + 
"$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere ) @@ -5779,27 +6888,45 @@ def test_timezones_daily(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-02T17:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T17:01:01", ) _create_event( team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-06T00:30:01", # Shouldn't be included anywhere ) with freeze_time(datetime(2020, 1, 5, 5, 0, tzinfo=ZoneInfo(self.team.timezone))): response = Trends().run( - Filter(data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up"}]}, team=self.team), + Filter( + data={ + "date_from": "-7d", + "events": [{"id": "sign up", "name": "sign up"}], + }, + team=self.team, + ), self.team, ) @@ -5823,12 +6950,16 @@ def test_timezones_daily(self): response = Trends().run( Filter( team=self.team, - data={"date_from": "-14d", "events": [{"id": "sign up", "name": "sign up", "math": "dau"}]}, + data={ + "date_from": "-14d", + "events": [{"id": "sign up", "name": "sign up", "math": "dau"}], + }, ), self.team, ) self.assertEqual( - response[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0] + response[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0], ) self.assertEqual( response[0]["labels"], @@ -5857,7 +6988,13 @@ def 
test_timezones_daily(self): team=self.team, data={ "date_from": "-7d", - "events": [{"id": "sign up", "name": "sign up", "math": "weekly_active"}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "math": "weekly_active", + } + ], }, ), self.team, @@ -5882,7 +7019,10 @@ def test_timezones_daily(self): response = Trends().run( Filter( team=self.team, - data={"date_from": "-7d", "events": [{"id": "sign up", "name": "sign up", "breakdown": "$os"}]}, + data={ + "date_from": "-7d", + "events": [{"id": "sign up", "name": "sign up", "breakdown": "$os"}], + }, ), self.team, ) @@ -5932,7 +7072,11 @@ def test_non_deterministic_timezones(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, ) with freeze_time("2022-11-10T01:01:01Z"): @@ -5940,7 +7084,11 @@ def test_non_deterministic_timezones(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) with freeze_time("2022-11-17T08:30:01Z"): @@ -5948,7 +7096,11 @@ def test_non_deterministic_timezones(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) with freeze_time("2022-11-24T08:30:01Z"): @@ -5956,7 +7108,11 @@ def test_non_deterministic_timezones(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) with freeze_time("2022-11-30T08:30:01Z"): @@ -5964,7 +7120,11 @@ def 
test_non_deterministic_timezones(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, ) with freeze_time("2022-11-30T13:01:01Z"): @@ -5991,21 +7151,33 @@ def test_timezones_weekly(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-11T19:01:01", # Saturday; TRICKY: This is the next UTC day in America/Phoenix ) _create_event( # This event should count towards week of 2020-01-12 (or 2020-01-06 in Monday mode) team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-12T02:01:01", # Sunday; TRICKY: This is the previous UTC day in Asia/Tokyo ) _create_event( # This event should count towards week of 2020-01-19 (or 2020-01-20 in Monday mode) team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "second url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "second url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-21T18:01:01", # Tuesday; TRICKY: This is the next UTC day in America/Phoenix ) @@ -6057,7 +7229,11 @@ def test_same_day(self): team=self.team, event="sign up", distinct_id="blabla", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", ) response = Trends().run( @@ -6086,7 +7262,11 @@ def test_same_day_with_person_on_events_v2(self): team=self.team, event="sign up", 
distinct_id="distinctid1", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", person_id=person_id1, ) @@ -6095,7 +7275,11 @@ def test_same_day_with_person_on_events_v2(self): team=self.team, event="sign up", distinct_id="distinctid2", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", person_id=person_id2, ) @@ -6148,7 +7332,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self): team=self.team, event="sign up", distinct_id="distinctid1", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", person_id=person_id1, ) @@ -6157,7 +7345,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self): team=self.team, event="some other event", distinct_id="distinctid2", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", person_id=person_id2, ) @@ -6166,7 +7358,11 @@ def test_same_day_with_person_on_events_v2_latest_override(self): team=self.team, event="sign up", distinct_id="distinctid3", - properties={"$current_url": "first url", "$browser": "Firefox", "$os": "Mac"}, + properties={ + "$current_url": "first url", + "$browser": "Firefox", + "$os": "Mac", + }, timestamp="2020-01-03T01:01:01Z", person_id=person_id3, ) @@ -6232,11 +7428,33 @@ def test_ilike_regression_with_current_clickhouse_version(self): team=self.team, data={ "date_from": "-14d", - "events": [{"id": "watched movie", "name": "watched movie", "type": "events", "order": 0}], + 
"events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + "order": 0, + } + ], "properties": [ - {"key": "email", "type": "event", "value": "posthog.com", "operator": "not_icontains"}, - {"key": "name", "type": "event", "value": "posthog.com", "operator": "not_icontains"}, - {"key": "name", "type": "person", "value": "posthog.com", "operator": "not_icontains"}, + { + "key": "email", + "type": "event", + "value": "posthog.com", + "operator": "not_icontains", + }, + { + "key": "name", + "type": "event", + "value": "posthog.com", + "operator": "not_icontains", + }, + { + "key": "name", + "type": "person", + "value": "posthog.com", + "operator": "not_icontains", + }, ], }, ), @@ -6477,7 +7695,13 @@ def test_trends_count_per_group_average_daily(self): team=self.team, data={ "display": TRENDS_LINEAR, - "events": [{"id": "viewed video", "math": "avg_count_per_actor", "math_group_type_index": 0}], + "events": [ + { + "id": "viewed video", + "math": "avg_count_per_actor", + "math_group_type_index": 0, + } + ], "date_from": "2020-01-01", "date_to": "2020-01-07", }, @@ -6518,7 +7742,13 @@ def test_trends_count_per_group_average_aggregated(self): team=self.team, data={ "display": TRENDS_TABLE, - "events": [{"id": "viewed video", "math": "avg_count_per_actor", "math_group_type_index": 0}], + "events": [ + { + "id": "viewed video", + "math": "avg_count_per_actor", + "math_group_type_index": 0, + } + ], "date_from": "2020-01-01", "date_to": "2020-01-07", }, @@ -6537,7 +7767,10 @@ def test_trends_breakdown_timezone(self): with freeze_time("2020-01-03 19:06:34"): _create_person(team_id=self.team.pk, distinct_ids=["another_user"]) _create_event( - team=self.team, event="viewed video", distinct_id="another_user", properties={"color": "orange"} + team=self.team, + event="viewed video", + distinct_id="another_user", + properties={"color": "orange"}, ) daily_response = Trends().run( @@ -6564,11 +7797,29 @@ def _create_groups(self): 
GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:7", properties={"industry": "finance"}) create_group( - team_id=self.team.pk, group_type_index=1, group_key="company:10", properties={"industry": "finance"} + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:7", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="company:10", + properties={"industry": "finance"}, ) # TODO: Delete this test when moved to person-on-events @@ -6604,8 +7855,22 @@ def test_breakdown_with_filter_groups(self): "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", "breakdown": "key", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], - "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], }, ), self.team, @@ -6618,7 +7883,9 @@ def test_breakdown_with_filter_groups(self): self.assertEqual(response[1]["count"], 1) @also_test_with_materialized_columns( - event_properties=["key"], group_properties=[(0, "industry")], 
materialize_only_with_person_on_events=True + event_properties=["key"], + group_properties=[(0, "industry")], + materialize_only_with_person_on_events=True, ) @snapshot_clickhouse_queries def test_breakdown_with_filter_groups_person_on_events(self): @@ -6653,8 +7920,22 @@ def test_breakdown_with_filter_groups_person_on_events(self): "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", "breakdown": "key", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], - "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + } + ], + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], }, ), self.team, @@ -6714,8 +7995,23 @@ def test_breakdown_with_filter_groups_person_on_events_v2(self): "date_from": "2020-01-01T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", "breakdown": "key", - "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0, "math": "dau"}], - "properties": [{"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}], + "events": [ + { + "id": "sign up", + "name": "sign up", + "type": "events", + "order": 0, + "math": "dau", + } + ], + "properties": [ + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + } + ], }, ), self.team, @@ -6777,7 +8073,11 @@ def test_breakdown_by_group_props(self): self.assertEqual(response[1]["count"], 1) filter = filter.shallow_clone( - {"breakdown_value": "technology", "date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-03"} + { + "breakdown_value": "technology", + "date_from": "2020-01-02T00:00:00Z", + "date_to": "2020-01-03", + } ) entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0}) res = self._get_trend_people(filter, entity) @@ -6839,7 +8139,11 @@ def 
test_breakdown_by_group_props_person_on_events(self): self.assertEqual(response[1]["count"], 1) filter = filter.shallow_clone( - {"breakdown_value": "technology", "date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-02"} + { + "breakdown_value": "technology", + "date_from": "2020-01-02T00:00:00Z", + "date_to": "2020-01-02", + } ) entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0}) res = self._get_trend_people(filter, entity) @@ -6895,7 +8199,12 @@ def test_filtering_with_group_props(self): self._create_groups() Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"}) - _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z") + _create_event( + event="$pageview", + distinct_id="person1", + team=self.team, + timestamp="2020-01-02T12:00:00Z", + ) _create_event( event="$pageview", distinct_id="person1", @@ -6925,7 +8234,12 @@ def test_filtering_with_group_props(self): "date_to": "2020-01-12T00:00:00Z", "events": [{"id": "$pageview", "type": "events", "order": 0}], "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, {"key": "key", "value": "value", "type": "person"}, ], }, @@ -6938,7 +8252,12 @@ def test_filtering_with_group_props_event_with_no_group_data(self): self._create_groups() Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"}) - _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z") + _create_event( + event="$pageview", + distinct_id="person1", + team=self.team, + timestamp="2020-01-02T12:00:00Z", + ) _create_event( event="$pageview", distinct_id="person1", @@ -6985,7 +8304,9 @@ def test_filtering_with_group_props_event_with_no_group_data(self): self.assertEqual(response[0]["count"], 4) 
@also_test_with_materialized_columns( - person_properties=["key"], group_properties=[(0, "industry")], materialize_only_with_person_on_events=True + person_properties=["key"], + group_properties=[(0, "industry")], + materialize_only_with_person_on_events=True, ) @snapshot_clickhouse_queries def test_breakdown_by_group_props_with_person_filter_person_on_events(self): @@ -7033,14 +8354,21 @@ def test_breakdown_by_group_props_with_person_filter_person_on_events(self): self.assertEqual(response[0]["count"], 1) @also_test_with_materialized_columns( - person_properties=["key"], group_properties=[(0, "industry")], materialize_only_with_person_on_events=True + person_properties=["key"], + group_properties=[(0, "industry")], + materialize_only_with_person_on_events=True, ) @snapshot_clickhouse_queries def test_filtering_with_group_props_person_on_events(self): self._create_groups() Person.objects.create(team_id=self.team.pk, distinct_ids=["person1"], properties={"key": "value"}) - _create_event(event="$pageview", distinct_id="person1", team=self.team, timestamp="2020-01-02T12:00:00Z") + _create_event( + event="$pageview", + distinct_id="person1", + team=self.team, + timestamp="2020-01-02T12:00:00Z", + ) _create_event( event="$pageview", distinct_id="person1", @@ -7070,7 +8398,12 @@ def test_filtering_with_group_props_person_on_events(self): "date_to": "2020-01-12T00:00:00Z", "events": [{"id": "$pageview", "type": "events", "order": 0}], "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, {"key": "key", "value": "value", "type": "person"}, ], }, @@ -7081,17 +8414,38 @@ def test_filtering_with_group_props_person_on_events(self): self.assertEqual(response[0]["count"], 1) @also_test_with_materialized_columns( - group_properties=[(0, "industry"), (2, "name")], materialize_only_with_person_on_events=True + group_properties=[(0, 
"industry"), (2, "name")], + materialize_only_with_person_on_events=True, ) @snapshot_clickhouse_queries def test_filtering_by_multiple_groups_person_on_events(self): GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.team, group_type="company", group_type_index=2) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:6", properties={"industry": "technology"}) - create_group(team_id=self.team.pk, group_type_index=2, group_key="company:5", properties={"name": "five"}) - create_group(team_id=self.team.pk, group_type_index=2, group_key="company:6", properties={"name": "six"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:6", + properties={"industry": "technology"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=2, + group_key="company:5", + properties={"name": "five"}, + ) + create_group( + team_id=self.team.pk, + group_type_index=2, + group_key="company:6", + properties={"name": "six"}, + ) journey = { "person1": [ @@ -7105,8 +8459,16 @@ def test_filtering_by_multiple_groups_person_on_events(self): "timestamp": datetime(2020, 1, 2, 12, 30), "properties": {"$group_2": "company:6"}, }, - {"event": "sign up", "timestamp": datetime(2020, 1, 2, 13), "properties": {"$group_0": "org:6"}}, - {"event": "sign up", "timestamp": datetime(2020, 1, 3, 15), "properties": {"$group_2": "company:5"}}, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 2, 13), + "properties": {"$group_0": "org:6"}, + }, + { + "event": "sign up", + "timestamp": datetime(2020, 1, 3, 15), + "properties": {"$group_2": "company:5"}, + }, ] } @@ -7119,8 +8481,18 @@ def 
test_filtering_by_multiple_groups_person_on_events(self): "date_to": "2020-01-12", "events": [{"id": "sign up", "name": "sign up", "type": "events", "order": 0}], "properties": [ - {"key": "industry", "value": "finance", "type": "group", "group_type_index": 0}, - {"key": "name", "value": "six", "type": "group", "group_type_index": 2}, + { + "key": "industry", + "value": "finance", + "type": "group", + "group_type_index": 0, + }, + { + "key": "name", + "value": "six", + "type": "group", + "group_type_index": 2, + }, ], }, ) @@ -7130,7 +8502,10 @@ def test_filtering_by_multiple_groups_person_on_events(self): self.assertEqual(len(response), 1) self.assertEqual(response[0]["count"], 1) - self.assertEqual(response[0]["data"], [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + response[0]["data"], + [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) filter = filter.shallow_clone({"date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-02T00:00:00Z"}) entity = Entity({"id": "sign up", "name": "sign up", "type": "events", "order": 0}) @@ -7161,7 +8536,16 @@ def test_get_cached_result_bad_cache(self): set_instance_setting("STRICT_CACHING_TEAMS", "all") fake_cached = { - "result": [{"days": ["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00"], "data": []}] + "result": [ + { + "days": [ + "2020-11-01 05:20:00", + "2020-11-01 10:22:00", + "2020-11-01 10:25:00", + ], + "data": [], + } + ] } filter = Filter( @@ -7183,7 +8567,14 @@ def test_get_cached_result_hour(self): fake_cached = { "result": [ - {"days": ["2020-11-01 05:20:00", "2020-11-01 10:22:00", "2020-11-01 10:25:00"], "data": [0.0, 0.0, 0.0]} + { + "days": [ + "2020-11-01 05:20:00", + "2020-11-01 10:22:00", + "2020-11-01 10:25:00", + ], + "data": [0.0, 0.0, 0.0], + } ] } @@ -7215,10 +8606,21 @@ def test_get_cached_result_hour(self): def test_get_cached_result_day(self): set_instance_setting("STRICT_CACHING_TEAMS", "all") - fake_cached = {"result": 
[{"days": ["2020-01-02", "2020-01-03", "2020-01-04"], "data": [0.0, 0.0, 0.0]}]} + fake_cached = { + "result": [ + { + "days": ["2020-01-02", "2020-01-03", "2020-01-04"], + "data": [0.0, 0.0, 0.0], + } + ] + } filter = Filter( team=self.team, - data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]}, + data={ + "date_from": "2020-01-02", + "date_to": "2020-01-04", + "events": [{"id": "sign up", "name": "sign up"}], + }, ) cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}") cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL) @@ -7226,7 +8628,14 @@ def test_get_cached_result_day(self): res = Trends().get_cached_result(filter, self.team) self.assertTrue(res) - fake_cached = {"result": [{"days": ["2020-01-01", "2020-01-02", "2020-01-03"], "data": [0.0, 0.0, 0.0]}]} + fake_cached = { + "result": [ + { + "days": ["2020-01-01", "2020-01-02", "2020-01-03"], + "data": [0.0, 0.0, 0.0], + } + ] + } cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL) @@ -7236,11 +8645,22 @@ def test_get_cached_result_day(self): def test_get_cached_result_week(self): set_instance_setting("STRICT_CACHING_TEAMS", "all") - fake_cached = {"result": [{"days": ["2020-11-01", "2020-11-08", "2020-11-15"], "data": [0.0, 0.0, 0.0]}]} + fake_cached = { + "result": [ + { + "days": ["2020-11-01", "2020-11-08", "2020-11-15"], + "data": [0.0, 0.0, 0.0], + } + ] + } filter = Filter( team=self.team, - data={"date_to": "2020-11-16", "events": [{"id": "sign up", "name": "sign up"}], "interval": "week"}, + data={ + "date_to": "2020-11-16", + "events": [{"id": "sign up", "name": "sign up"}], + "interval": "week", + }, ) cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}") cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL) @@ -7250,7 +8670,11 @@ def test_get_cached_result_week(self): filter = Filter( team=self.team, - data={"date_to": "2020-11-23", "events": [{"id": "sign up", "name": "sign up"}], 
"interval": "week"}, + data={ + "date_to": "2020-11-23", + "events": [{"id": "sign up", "name": "sign up"}], + "interval": "week", + }, ) res = Trends().get_cached_result(filter, self.team) @@ -7259,11 +8683,22 @@ def test_get_cached_result_week(self): def test_get_cached_result_month(self): set_instance_setting("STRICT_CACHING_TEAMS", "all") - fake_cached = {"result": [{"days": ["2020-09-01", "2020-10-01", "2020-11-01"], "data": [0.0, 0.0, 0.0]}]} + fake_cached = { + "result": [ + { + "days": ["2020-09-01", "2020-10-01", "2020-11-01"], + "data": [0.0, 0.0, 0.0], + } + ] + } filter = Filter( team=self.team, - data={"date_to": "2020-11-16", "events": [{"id": "sign up", "name": "sign up"}], "interval": "month"}, + data={ + "date_to": "2020-11-16", + "events": [{"id": "sign up", "name": "sign up"}], + "interval": "month", + }, ) cache_key = generate_cache_key(f"{filter.toJSON()}_{self.team.pk}") cache.set(cache_key, fake_cached, settings.CACHED_RESULTS_TTL) @@ -7273,7 +8708,11 @@ def test_get_cached_result_month(self): filter = Filter( team=self.team, - data={"date_to": "2020-12-01", "events": [{"id": "sign up", "name": "sign up"}], "interval": "week"}, + data={ + "date_to": "2020-12-01", + "events": [{"id": "sign up", "name": "sign up"}], + "interval": "week", + }, ) res = Trends().get_cached_result(filter, self.team) @@ -7290,7 +8729,11 @@ def test_merge_result(self): } filter = Filter( team=self.team, - data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]}, + data={ + "date_from": "2020-01-02", + "date_to": "2020-01-04", + "events": [{"id": "sign up", "name": "sign up"}], + }, ) result = [{"label": "sign up - Chrome", "data": [15.0, 12.0]}] @@ -7298,10 +8741,13 @@ def test_merge_result(self): self.assertEqual(merged_result[0]["data"], [23.0, 15.0, 12.0]) def test_merge_result_no_cache(self): - filter = Filter( team=self.team, - data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign 
up", "name": "sign up"}]}, + data={ + "date_from": "2020-01-02", + "date_to": "2020-01-04", + "events": [{"id": "sign up", "name": "sign up"}], + }, ) result = [{"label": "sign up - Chrome", "data": [15.0, 12.0]}] @@ -7326,7 +8772,11 @@ def test_merge_result_multiple(self): } filter = Filter( team=self.team, - data={"date_from": "2020-01-02", "date_to": "2020-01-04", "events": [{"id": "sign up", "name": "sign up"}]}, + data={ + "date_from": "2020-01-02", + "date_to": "2020-01-04", + "events": [{"id": "sign up", "name": "sign up"}], + }, ) result = [ diff --git a/posthog/queries/time_to_see_data/sessions.py b/posthog/queries/time_to_see_data/sessions.py index b7c3274bf9241..8ebeeb8db36a6 100644 --- a/posthog/queries/time_to_see_data/sessions.py +++ b/posthog/queries/time_to_see_data/sessions.py @@ -68,7 +68,10 @@ def get_session_events(query: SessionEventsQuerySerializer) -> Optional[Dict]: events = query_with_columns(GET_SESSION_EVENTS, params) queries = query_with_columns(GET_SESSION_QUERIES, params) session_query = SessionsQuerySerializer( - data={"team_id": query.validated_data["team_id"], "session_id": query.validated_data["session_id"]} + data={ + "team_id": query.validated_data["team_id"], + "session_id": query.validated_data["session_id"], + } ) session_query.is_valid(raise_exception=True) sessions = get_sessions(session_query).data diff --git a/posthog/queries/time_to_see_data/test/test_hierarchy.py b/posthog/queries/time_to_see_data/test/test_hierarchy.py index c449609e26905..609300868ffe1 100644 --- a/posthog/queries/time_to_see_data/test/test_hierarchy.py +++ b/posthog/queries/time_to_see_data/test/test_hierarchy.py @@ -1,16 +1,37 @@ import pytest -from posthog.queries.time_to_see_data.hierarchy import Node, NodeType, construct_hierarchy, is_child +from posthog.queries.time_to_see_data.hierarchy import ( + Node, + NodeType, + construct_hierarchy, + is_child, +) @pytest.mark.parametrize( "potential_parent,potential_child,expected_result", [ # Sessions - 
(Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.INTERACTION, {"session_id": 1}), True), - (Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.QUERY, {"session_id": 1}), True), - (Node(NodeType.SESSION, {"session_id": 2}), Node(NodeType.QUERY, {"session_id": 1}), False), - (Node(NodeType.SESSION, {"session_id": 1}), Node(NodeType.SESSION, {"session_id": 1}), False), + ( + Node(NodeType.SESSION, {"session_id": 1}), + Node(NodeType.INTERACTION, {"session_id": 1}), + True, + ), + ( + Node(NodeType.SESSION, {"session_id": 1}), + Node(NodeType.QUERY, {"session_id": 1}), + True, + ), + ( + Node(NodeType.SESSION, {"session_id": 2}), + Node(NodeType.QUERY, {"session_id": 1}), + False, + ), + ( + Node(NodeType.SESSION, {"session_id": 1}), + Node(NodeType.SESSION, {"session_id": 1}), + False, + ), # Interactions ( Node(NodeType.INTERACTION, {"primary_interaction_id": "1"}), @@ -37,7 +58,11 @@ Node(NodeType.SUBQUERY, {"client_query_id": "123::2543245"}), False, ), - (Node(NodeType.INTERACTION, {"session_id": 1}), Node(NodeType.SESSION, {}), False), + ( + Node(NodeType.INTERACTION, {"session_id": 1}), + Node(NodeType.SESSION, {}), + False, + ), (Node(NodeType.INTERACTION, {}), Node(NodeType.INTERACTION, {}), False), # Events ( @@ -75,17 +100,44 @@ def test_is_child(potential_parent, potential_child, expected_result): def test_construct_hierarchy(): session = {"session_id": 1} - interaction_1 = {**session, "is_primary_interaction": True, "primary_interaction_id": "123"} - event_11 = {**session, "is_primary_interaction": False, "primary_interaction_id": "123", "query_id": "456"} + interaction_1 = { + **session, + "is_primary_interaction": True, + "primary_interaction_id": "123", + } + event_11 = { + **session, + "is_primary_interaction": False, + "primary_interaction_id": "123", + "query_id": "456", + } query_111 = {**session, "client_query_id": "123::456", "is_initial_query": True} - subquery_1111 = {**session, "client_query_id": "123::456", "is_initial_query": 
False} - event_12 = {**session, "is_primary_interaction": False, "primary_interaction_id": "123", "query_id": "789"} + subquery_1111 = { + **session, + "client_query_id": "123::456", + "is_initial_query": False, + } + event_12 = { + **session, + "is_primary_interaction": False, + "primary_interaction_id": "123", + "query_id": "789", + } query_121 = {**session, "client_query_id": "123::789", "is_initial_query": True} query_13 = {**session, "client_query_id": "123::1111", "is_initial_query": True} - interaction_2 = {**session, "is_primary_interaction": True, "primary_interaction_id": "8888"} + interaction_2 = { + **session, + "is_primary_interaction": True, + "primary_interaction_id": "8888", + } - stray_event = {**session, "is_primary_interaction": False, "primary_interaction_id": "efg", "query_id": "9999"} + stray_event = { + **session, + "is_primary_interaction": False, + "primary_interaction_id": "efg", + "query_id": "9999", + } stray_query = {**session, "client_query_id": "foobar", "is_initial_query": True} result = construct_hierarchy( diff --git a/posthog/queries/trends/breakdown.py b/posthog/queries/trends/breakdown.py index 7e1d8c0b6198b..e891190f6e310 100644 --- a/posthog/queries/trends/breakdown.py +++ b/posthog/queries/trends/breakdown.py @@ -23,7 +23,11 @@ from posthog.models.filters import Filter from posthog.models.filters.mixins.utils import cached_property from posthog.models.property import PropertyGroup -from posthog.models.property.util import get_property_string_expr, normalize_url_breakdown, parse_prop_grouped_clauses +from posthog.models.property.util import ( + get_property_string_expr, + normalize_url_breakdown, + parse_prop_grouped_clauses, +) from posthog.models.team import Team from posthog.models.team.team import groups_on_events_querying_enabled from posthog.queries.breakdown_props import ( @@ -65,8 +69,16 @@ parse_response, process_math, ) -from posthog.queries.util import get_interval_func_ch, get_person_properties_mode, 
get_start_of_interval_sql -from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id +from posthog.queries.util import ( + get_interval_func_ch, + get_person_properties_mode, + get_start_of_interval_sql, +) +from posthog.utils import ( + PersonOnEventsMode, + encode_get_request_params, + generate_short_id, +) from posthog.queries.person_on_events_v2_sql import PERSON_OVERRIDES_JOIN_SQL @@ -186,23 +198,35 @@ def get_query(self) -> Tuple[str, Dict, Callable]: _params, _breakdown_filter_params = {}, {} if self.filter.breakdown_type == "cohort": - _params, breakdown_filter, _breakdown_filter_params, breakdown_value = self._breakdown_cohort_params() + ( + _params, + breakdown_filter, + _breakdown_filter_params, + breakdown_value, + ) = self._breakdown_cohort_params() else: aggregate_operation_for_breakdown_init = ( "count(*)" if self.entity.math == "dau" or self.entity.math in COUNT_PER_ACTOR_MATH_FUNCTIONS else aggregate_operation ) - _params, breakdown_filter, _breakdown_filter_params, breakdown_value = self._breakdown_prop_params( - aggregate_operation_for_breakdown_init, math_params - ) + ( + _params, + breakdown_filter, + _breakdown_filter_params, + breakdown_value, + ) = self._breakdown_prop_params(aggregate_operation_for_breakdown_init, math_params) if len(_params["values"]) == 0: # If there are no breakdown values, we are sure that there's no relevant events, so instead of adjusting # a "real" SELECT for this, we only include the below dummy SELECT. # It's a drop-in replacement for a "real" one, simply always returning 0 rows. # See https://github.com/PostHog/posthog/pull/5674 for context. 
- return ("SELECT [now()] AS date, [0] AS total, '' AS breakdown_value LIMIT 0", {}, lambda _: []) + return ( + "SELECT [now()] AS date, [0] AS total, '' AS breakdown_value LIMIT 0", + {}, + lambda _: [], + ) person_join_condition, person_join_params = self._person_join_condition() groups_join_condition, groups_join_params = self._groups_join_condition() @@ -219,16 +243,20 @@ def get_query(self) -> Tuple[str, Dict, Callable]: **sessions_join_params, **sampling_params, } - breakdown_filter_params = {**breakdown_filter_params, **_breakdown_filter_params} + breakdown_filter_params = { + **breakdown_filter_params, + **_breakdown_filter_params, + } if self.filter.display in NON_TIME_SERIES_DISPLAY_TYPES: breakdown_filter = breakdown_filter.format(**breakdown_filter_params) if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: interval_func = get_interval_func_ch(self.filter.interval) - active_user_format_params, active_user_query_params = get_active_user_params( - self.filter, self.entity, self.team_id - ) + ( + active_user_format_params, + active_user_query_params, + ) = get_active_user_params(self.filter, self.entity, self.team_id) self.params.update(active_user_query_params) conditions = BREAKDOWN_ACTIVE_USER_CONDITIONS_SQL.format( **breakdown_filter_params, **active_user_format_params @@ -297,9 +325,10 @@ def get_query(self) -> Tuple[str, Dict, Callable]: breakdown_filter = breakdown_filter.format(**breakdown_filter_params) if self.entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: - active_user_format_params, active_user_query_params = get_active_user_params( - self.filter, self.entity, self.team_id - ) + ( + active_user_format_params, + active_user_query_params, + ) = get_active_user_params(self.filter, self.entity, self.team_id) self.params.update(active_user_query_params) conditions = BREAKDOWN_ACTIVE_USER_CONDITIONS_SQL.format( **breakdown_filter_params, **active_user_format_params @@ -386,7 +415,12 @@ def get_query(self) -> Tuple[str, Dict, Callable]: 
date_to_truncated=get_start_of_interval_sql(self.filter.interval, team=self.team, source="%(date_to)s"), interval_func=get_interval_func_ch(self.filter.interval), ) - self.params.update({"seconds_in_interval": seconds_in_interval, "num_intervals": num_intervals}) + self.params.update( + { + "seconds_in_interval": seconds_in_interval, + "num_intervals": num_intervals, + } + ) return breakdown_query, self.params, self._parse_trend_result(self.filter, self.entity) def _breakdown_cohort_params(self): @@ -422,7 +456,10 @@ def _breakdown_prop_params(self, aggregate_operation: str, math_params: Dict): return ( {"values": values_arr}, BREAKDOWN_PROP_JOIN_SQL if not self.filter.using_histogram else BREAKDOWN_HISTOGRAM_PROP_JOIN_SQL, - {"breakdown_value_expr": breakdown_value, "numeric_property_filter": numeric_property_filter}, + { + "breakdown_value_expr": breakdown_value, + "numeric_property_filter": numeric_property_filter, + }, breakdown_value, ) @@ -447,12 +484,20 @@ def _get_breakdown_value(self, breakdown: str) -> str: ): properties_field = f"group{self.filter.breakdown_group_type_index}_properties" breakdown_value, _ = get_property_string_expr( - "events", breakdown, "%(key)s", properties_field, materialised_table_column=properties_field + "events", + breakdown, + "%(key)s", + properties_field, + materialised_table_column=properties_field, ) elif self.person_on_events_mode != PersonOnEventsMode.DISABLED and self.filter.breakdown_type != "group": if self.filter.breakdown_type == "person": breakdown_value, _ = get_property_string_expr( - "events", breakdown, "%(key)s", "person_properties", materialised_table_column="person_properties" + "events", + breakdown, + "%(key)s", + "person_properties", + materialised_table_column="person_properties", ) else: breakdown_value, _ = get_property_string_expr("events", breakdown, "%(key)s", "properties") @@ -462,7 +507,11 @@ def _get_breakdown_value(self, breakdown: str) -> str: elif self.filter.breakdown_type == "group": 
properties_field = f"group_properties_{self.filter.breakdown_group_type_index}" breakdown_value, _ = get_property_string_expr( - "groups", breakdown, "%(key)s", properties_field, materialised_table_column="group_properties" + "groups", + breakdown, + "%(key)s", + properties_field, + materialised_table_column="group_properties", ) else: breakdown_value, _ = get_property_string_expr("events", breakdown, "%(key)s", "properties") @@ -565,7 +614,11 @@ def _parse(result: List) -> List: parsed_result.update( { "persons_urls": self._get_persons_url( - filter, entity, self.team, stats[0], result_descriptors["breakdown_value"] + filter, + entity, + self.team, + stats[0], + result_descriptors["breakdown_value"], ) } ) @@ -683,7 +736,10 @@ def _person_join_condition(self) -> Tuple[str, Dict]: def _groups_join_condition(self) -> Tuple[str, Dict]: return GroupsJoinQuery( - self.filter, self.team_id, self.column_optimizer, person_on_events_mode=self.person_on_events_mode + self.filter, + self.team_id, + self.column_optimizer, + person_on_events_mode=self.person_on_events_mode, ).get_join_query() def _sessions_join_condition(self) -> Tuple[str, Dict]: diff --git a/posthog/queries/trends/lifecycle.py b/posthog/queries/trends/lifecycle.py index 8e659a5368970..4821d5295a363 100644 --- a/posthog/queries/trends/lifecycle.py +++ b/posthog/queries/trends/lifecycle.py @@ -13,7 +13,11 @@ from posthog.queries.trends.sql import LIFECYCLE_EVENTS_QUERY, LIFECYCLE_SQL from posthog.queries.trends.util import parse_response from posthog.queries.util import get_person_properties_mode -from posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id +from posthog.utils import ( + PersonOnEventsMode, + encode_get_request_params, + generate_short_id, +) # Lifecycle takes an event/action, time range, interval and for every period, splits the users who did the action into 4: # diff --git a/posthog/queries/trends/test/test_breakdowns.py 
b/posthog/queries/trends/test/test_breakdowns.py index df24ea8c14237..29c917e4dd843 100644 --- a/posthog/queries/trends/test/test_breakdowns.py +++ b/posthog/queries/trends/test/test_breakdowns.py @@ -4,7 +4,11 @@ from posthog.constants import TRENDS_TABLE from posthog.models import Filter from posthog.queries.trends.trends import Trends -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -17,7 +21,11 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$session_id": "1", "movie_length": 100, "$current_url": "https://example.com"}, + "properties": { + "$session_id": "1", + "movie_length": 100, + "$current_url": "https://example.com", + }, } ], # Duration 60 seconds, with 2 events in 1 session @@ -25,12 +33,20 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$session_id": "2", "movie_length": 50, "$current_url": "https://example.com"}, + "properties": { + "$session_id": "2", + "movie_length": 50, + "$current_url": "https://example.com", + }, }, { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 2), - "properties": {"$session_id": "2", "movie_length": 75, "$current_url": "https://example.com"}, + "properties": { + "$session_id": "2", + "movie_length": 75, + "$current_url": "https://example.com", + }, }, ], # Duration 90 seconds, but session spans query boundary, so only a single event is counted @@ -90,7 +106,14 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}): response = Trends().run( Filter( data={ - "events": [{"id": "watched movie", "name": "watched movie", "type": "events", **events_extra}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + **events_extra, + } + ], "date_from": 
"2020-01-02T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", **extra, @@ -106,7 +129,13 @@ def test_breakdown_by_session_duration_of_events(self): { "breakdown": "$session_duration", "breakdown_type": "session", - "properties": [{"key": "$current_url", "operator": "is_not", "value": ["https://test.com"]}], + "properties": [ + { + "key": "$current_url", + "operator": "is_not", + "value": ["https://test.com"], + } + ], } ) @@ -128,16 +157,34 @@ def test_breakdown_by_session_duration_of_events_with_bucketing(self): "breakdown": "$session_duration", "breakdown_type": "session", "breakdown_histogram_bin_count": 3, - "properties": [{"key": "$current_url", "operator": "is_not", "value": ["https://test.com"]}], + "properties": [ + { + "key": "$current_url", + "operator": "is_not", + "value": ["https://test.com"], + } + ], } ) self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ - ("[0.0,69.92]", 3.0, [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[69.92,110.72]", 1.0, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[110.72,180.01]", 5.0, [0.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "[0.0,69.92]", + 3.0, + [3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[69.92,110.72]", + 1.0, + [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[110.72,180.01]", + 5.0, + [0.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) @@ -160,7 +207,8 @@ def test_breakdown_by_session_duration_of_events_single_aggregate(self): @snapshot_clickhouse_queries def test_breakdown_by_session_duration_of_unique_sessions(self): response = self._run( - {"breakdown": "$session_duration", "breakdown_type": "session"}, events_extra={"math": "unique_session"} + {"breakdown": "$session_duration", "breakdown_type": "session"}, + events_extra={"math": "unique_session"}, ) self.assertEqual( @@ -177,62 +225,126 @@ def test_breakdown_by_session_duration_of_unique_sessions(self): 
@snapshot_clickhouse_queries def test_breakdown_by_session_duration_of_unique_sessions_with_bucketing(self): response = self._run( - {"breakdown": "$session_duration", "breakdown_type": "session", "breakdown_histogram_bin_count": 3}, + { + "breakdown": "$session_duration", + "breakdown_type": "session", + "breakdown_histogram_bin_count": 3, + }, events_extra={"math": "unique_session"}, ) self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ - ("[0.0,69.92]", 2.0, [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[69.92,110.72]", 1.0, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[110.72,180.01]", 3.0, [0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "[0.0,69.92]", + 2.0, + [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[69.92,110.72]", + 1.0, + [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[110.72,180.01]", + 3.0, + [0.0, 0.0, 1.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) @snapshot_clickhouse_queries def test_breakdown_by_event_property_with_bucketing(self): response = self._run( - {"breakdown": "movie_length", "breakdown_type": "event", "breakdown_histogram_bin_count": 3} + { + "breakdown": "movie_length", + "breakdown_type": "event", + "breakdown_histogram_bin_count": 3, + } ) self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ - ("[25.0,66.25]", 4.0, [2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[66.25,98.37]", 2.0, [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[98.37,1000.01]", 2.0, [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "[25.0,66.25]", + 4.0, + [2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[66.25,98.37]", + 2.0, + [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[98.37,1000.01]", + 2.0, + [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) 
@snapshot_clickhouse_queries def test_breakdown_by_event_property_of_unique_sessions_with_bucketing(self): response = self._run( - {"breakdown": "movie_length", "breakdown_type": "event", "breakdown_histogram_bin_count": 3}, + { + "breakdown": "movie_length", + "breakdown_type": "event", + "breakdown_histogram_bin_count": 3, + }, events_extra={"math": "unique_session"}, ) self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ - ("[25.0,66.25]", 3.0, [2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[66.25,98.37]", 2.0, [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("[98.37,1000.01]", 2.0, [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "[25.0,66.25]", + 3.0, + [2.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[66.25,98.37]", + 2.0, + [1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "[98.37,1000.01]", + 2.0, + [1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) def test_breakdown_by_event_property_with_bucketing_and_duplicate_buckets(self): journey = { "person1": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 2, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 2, 12, 1), + "properties": {"episode_length": 300}, + } ], "person2": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 4, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 4, 12, 1), + "properties": {"episode_length": 300}, + } ], "person3": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 6, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 6, 12, 1), + "properties": {"episode_length": 300}, + } ], "person4": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 8, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": 
datetime(2020, 1, 8, 12, 1), + "properties": {"episode_length": 300}, + } ], } @@ -256,22 +368,44 @@ def test_breakdown_by_event_property_with_bucketing_and_duplicate_buckets(self): self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], - [("[300.0,300.01]", 4.0, [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0])], + [ + ( + "[300.0,300.01]", + 4.0, + [1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], + ) + ], ) def test_breakdown_by_event_property_with_bucketing_and_single_bucket(self): journey = { "person1": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 2, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 2, 12, 1), + "properties": {"episode_length": 300}, + } ], "person2": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 4, 12, 1), "properties": {"episode_length": 300}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 4, 12, 1), + "properties": {"episode_length": 300}, + } ], "person3": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 5, 12, 1), "properties": {"episode_length": 320}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 5, 12, 1), + "properties": {"episode_length": 320}, + } ], "person4": [ - {"event": "watched tv", "timestamp": datetime(2020, 1, 6, 12, 1), "properties": {"episode_length": 305}} + { + "event": "watched tv", + "timestamp": datetime(2020, 1, 6, 12, 1), + "properties": {"episode_length": 305}, + } ], } @@ -293,7 +427,13 @@ def test_breakdown_by_event_property_with_bucketing_and_single_bucket(self): self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], - [("[300.0,320.01]", 4.0, [1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])], + [ + ( + "[300.0,320.01]", + 4.0, + [1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) + ], ) @snapshot_clickhouse_queries @@ -301,7 +441,14 @@ def 
test_breakdown_by_event_property_with_entity_session_filter(self): response = self._run( {"breakdown": "$current_url", "breakdown_type": "event"}, events_extra={ - "properties": [{"key": "$session_duration", "type": "session", "operator": "gt", "value": 30}] + "properties": [ + { + "key": "$session_duration", + "type": "session", + "operator": "gt", + "value": 30, + } + ] }, ) @@ -309,7 +456,11 @@ def test_breakdown_by_event_property_with_entity_session_filter(self): [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ ("", 6.0, [1.0, 0.0, 1.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("https://example.com", 2.0, [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "https://example.com", + 2.0, + [2.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) @@ -326,6 +477,10 @@ def test_breakdown_histogram_by_missing_property_regression(self): self.assertEqual( [(item["breakdown_value"], item["count"], item["data"]) for item in response], [ - ("[nan,nan]", 0.0, [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "[nan,nan]", + 0.0, + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ], ) diff --git a/posthog/queries/trends/test/test_breakdowns_by_current_url.py b/posthog/queries/trends/test/test_breakdowns_by_current_url.py index 76b7ad94902ea..bc7a81595843b 100644 --- a/posthog/queries/trends/test/test_breakdowns_by_current_url.py +++ b/posthog/queries/trends/test/test_breakdowns_by_current_url.py @@ -3,7 +3,11 @@ from posthog.models import Filter from posthog.queries.trends.trends import Trends -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + snapshot_clickhouse_queries, +) from posthog.test.test_journeys import journeys_for @@ -16,13 +20,19 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": 
"https://example.com", "$pathname": ""}, + "properties": { + "$current_url": "https://example.com", + "$pathname": "", + }, }, # trailing question mark { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com?", "$pathname": "?"}, + "properties": { + "$current_url": "https://example.com?", + "$pathname": "?", + }, }, ], "person2": [ @@ -30,13 +40,19 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/", "$pathname": "/"}, + "properties": { + "$current_url": "https://example.com/", + "$pathname": "/", + }, }, # trailing hash { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com#", "$pathname": "#"}, + "properties": { + "$current_url": "https://example.com#", + "$pathname": "#", + }, }, ], "person3": [ @@ -44,7 +60,10 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home", "$pathname": "/home"}, + "properties": { + "$current_url": "https://example.com/home", + "$pathname": "/home", + }, }, ], "person4": [ @@ -52,19 +71,28 @@ def setUp(self): { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home/", "$pathname": "/home/"}, + "properties": { + "$current_url": "https://example.com/home/", + "$pathname": "/home/", + }, }, # trailing hash { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home#", "$pathname": "/home#"}, + "properties": { + "$current_url": "https://example.com/home#", + "$pathname": "/home#", + }, }, # all the things { "event": "watched movie", "timestamp": datetime(2020, 1, 2, 12, 1), - "properties": {"$current_url": "https://example.com/home/?#", "$pathname": "/home/?#"}, + "properties": { + 
"$current_url": "https://example.com/home/?#", + "$pathname": "/home/?#", + }, }, ], } @@ -75,7 +103,14 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}): response = Trends().run( Filter( data={ - "events": [{"id": "watched movie", "name": "watched movie", "type": "events", **events_extra}], + "events": [ + { + "id": "watched movie", + "name": "watched movie", + "type": "events", + **events_extra, + } + ], "date_from": "2020-01-02T00:00:00Z", "date_to": "2020-01-12T00:00:00Z", **extra, @@ -87,7 +122,13 @@ def _run(self, extra: Dict = {}, events_extra: Dict = {}): @snapshot_clickhouse_queries def test_breakdown_by_pathname(self) -> None: - response = self._run({"breakdown": "$pathname", "breakdown_type": "event", "breakdown_normalize_url": True}) + response = self._run( + { + "breakdown": "$pathname", + "breakdown_type": "event", + "breakdown_normalize_url": True, + } + ) assert [(item["breakdown_value"], item["count"], item["data"]) for item in response] == [ ("/", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), @@ -96,9 +137,23 @@ def test_breakdown_by_pathname(self) -> None: @snapshot_clickhouse_queries def test_breakdown_by_current_url(self) -> None: - response = self._run({"breakdown": "$current_url", "breakdown_type": "event", "breakdown_normalize_url": True}) + response = self._run( + { + "breakdown": "$current_url", + "breakdown_type": "event", + "breakdown_normalize_url": True, + } + ) assert [(item["breakdown_value"], item["count"], item["data"]) for item in response] == [ - ("https://example.com", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), - ("https://example.com/home", 4.0, [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), + ( + "https://example.com", + 4.0, + [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), + ( + "https://example.com/home", + 4.0, + [4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ), ] diff --git a/posthog/queries/trends/test/test_formula.py 
b/posthog/queries/trends/test/test_formula.py index 5dffeac08959c..d6dc332fbf4f4 100644 --- a/posthog/queries/trends/test/test_formula.py +++ b/posthog/queries/trends/test/test_formula.py @@ -7,7 +7,12 @@ from posthog.models.filters.filter import Filter from posthog.models.group.util import create_group from posthog.queries.trends.trends import Trends -from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries +from posthog.test.base import ( + APIBaseTest, + ClickhouseTestMixin, + _create_event, + snapshot_clickhouse_queries, +) class TestFormula(ClickhouseTestMixin, APIBaseTest): @@ -17,10 +22,17 @@ def setUp(self): super().setUp() Person.objects.create( - team_id=self.team.pk, distinct_ids=["blabla", "anonymous_id"], properties={"$some_prop": "some_val"} + team_id=self.team.pk, + distinct_ids=["blabla", "anonymous_id"], + properties={"$some_prop": "some_val"}, ) - create_group(team_id=self.team.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"}) + create_group( + team_id=self.team.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, + ) with freeze_time("2020-01-02T13:01:01Z"): _create_event( @@ -39,33 +51,58 @@ def setUp(self): team=self.team, event="session start", distinct_id="blabla", - properties={"session duration": 300, "location": "Paris", "$session_id": "1", "$group_0": "org:5"}, + properties={ + "session duration": 300, + "location": "Paris", + "$session_id": "1", + "$group_0": "org:5", + }, ) _create_event( team=self.team, event="session start", distinct_id="blabla", - properties={"session duration": 400, "location": "London", "$session_id": "1", "$group_0": "org:5"}, + properties={ + "session duration": 400, + "location": "London", + "$session_id": "1", + "$group_0": "org:5", + }, ) with freeze_time("2020-01-03T13:01:01Z"): _create_event( team=self.team, event="session start", distinct_id="blabla", - properties={"session duration": 400, 
"location": "London", "$session_id": "1", "$group_0": "org:5"}, + properties={ + "session duration": 400, + "location": "London", + "$session_id": "1", + "$group_0": "org:5", + }, ) with freeze_time("2020-01-03T13:04:01Z"): _create_event( team=self.team, event="session start", distinct_id="blabla", - properties={"session duration": 500, "location": "London", "$session_id": "1", "$group_0": "org:5"}, + properties={ + "session duration": 500, + "location": "London", + "$session_id": "1", + "$group_0": "org:5", + }, ) _create_event( team=self.team, event="session end", distinct_id="blabla", - properties={"session duration": 500, "location": "London", "$session_id": "1", "$group_0": "org:5"}, + properties={ + "session duration": 500, + "location": "London", + "$session_id": "1", + "$group_0": "org:5", + }, ) _create_event( @@ -98,8 +135,16 @@ def _run(self, extra: Dict = {}, run_at: Optional[str] = None): Filter( data={ "events": [ - {"id": "session start", "math": "sum", "math_property": "session duration"}, - {"id": "session start", "math": "avg", "math_property": "session duration"}, + { + "id": "session start", + "math": "sum", + "math_property": "session duration", + }, + { + "id": "session start", + "math": "avg", + "math_property": "session duration", + }, ], "formula": "A + B", **extra, @@ -191,13 +236,28 @@ def test_month_interval(self): self.assertEqual(data, [0.0, 0.0, 2160.0]) def test_formula(self): - self.assertEqual(self._run({"formula": "A - B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 600.0, 450.0, 0.0]) - self.assertEqual(self._run({"formula": "A * B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 270000.0, 405000.0, 0.0]) - self.assertEqual(self._run({"formula": "A / B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 2.0, 0.0]) - self.assertEqual(self._run({"formula": "(A/3600)/B"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + self._run({"formula": "A - B"})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 600.0, 450.0, 0.0], + ) + 
self.assertEqual( + self._run({"formula": "A * B"})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 270000.0, 405000.0, 0.0], + ) + self.assertEqual( + self._run({"formula": "A / B"})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 3.0, 2.0, 0.0], + ) + self.assertEqual( + self._run({"formula": "(A/3600)/B"})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) self.assertEqual(self._run({"formula": "(A/3600)/B"})[0]["count"], 0) - self.assertEqual(self._run({"formula": "A/0"})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]) + self.assertEqual( + self._run({"formula": "A/0"})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], + ) self.assertEqual(self._run({"formula": "A/0"})[0]["count"], 0) @snapshot_clickhouse_queries @@ -228,7 +288,12 @@ def test_regression_formula_with_unique_sessions_2x_and_duration_filter(self): "id": "session start", "math": "unique_session", "properties": [ - {"key": "$session_duration", "value": 12, "operator": "gt", "type": "session"} + { + "key": "$session_duration", + "value": 12, + "operator": "gt", + "type": "session", + } ], }, {"id": "session start", "math": "unique_session"}, @@ -252,14 +317,24 @@ def test_regression_formula_with_unique_sessions_2x_and_duration_filter_2x(self) "id": "$autocapture", "math": "unique_session", "properties": [ - {"key": "$session_duration", "type": "session", "value": 30, "operator": "lt"} + { + "key": "$session_duration", + "type": "session", + "value": 30, + "operator": "lt", + } ], }, { "id": "session start", "math": "unique_session", "properties": [ - {"key": "$session_duration", "type": "session", "value": 500, "operator": "gt"} + { + "key": "$session_duration", + "type": "session", + "value": 500, + "operator": "gt", + } ], }, ], @@ -286,7 +361,13 @@ def test_regression_formula_with_session_duration_aggregation(self): "math": "avg", "math_property": "$session_duration", }, - {"type": "events", "id": "session end", "order": 1, "name": "$pageview", "math": "total"}, + { + "type": "events", + 
"id": "session end", + "order": 1, + "name": "$pageview", + "math": "total", + }, ], "formula": "A / B", } @@ -349,12 +430,19 @@ def test_breakdown_aggregated(self): @snapshot_clickhouse_queries def test_breakdown_with_different_breakdown_values_per_series(self): - response = self._run( { "events": [ - {"id": "session start", "math": "sum", "math_property": "session duration"}, - {"id": "session end", "math": "sum", "math_property": "session duration"}, + { + "id": "session start", + "math": "sum", + "math_property": "session duration", + }, + { + "id": "session end", + "math": "sum", + "math_property": "session duration", + }, ], "formula": "A + B", "breakdown": "location", @@ -387,8 +475,18 @@ def test_breakdown_counts_of_different_events_one_without_events(self): "breakdown": "location", "breakdown_type": "event", "events": [ - {"id": "session start", "name": "session start", "type": "events", "order": 0}, - {"id": "session error", "name": "session error", "type": "events", "order": 1}, + { + "id": "session start", + "name": "session start", + "type": "events", + "order": 0, + }, + { + "id": "session error", + "name": "session error", + "type": "events", + "order": 1, + }, ], } ), @@ -468,9 +566,15 @@ def test_breakdown_cohort(self): @snapshot_clickhouse_queries def test_breakdown_hogql(self): response = self._run( - {"breakdown": "concat(person.properties.$some_prop, ' : ', properties.location)", "breakdown_type": "hogql"} + { + "breakdown": "concat(person.properties.$some_prop, ' : ', properties.location)", + "breakdown_type": "hogql", + } + ) + self.assertEqual( + [series["label"] for series in response], + ["some_val : London", "some_val : Paris"], ) - self.assertEqual([series["label"] for series in response], ["some_val : London", "some_val : Paris"]) self.assertEqual( [ [0.0, 0.0, 0.0, 0.0, 0.0, 800.0, 1350.0, 0.0], @@ -481,7 +585,11 @@ def test_breakdown_hogql(self): def test_breakdown_mismatching_sizes(self): response = self._run( - {"events": [{"id": 
"session start"}, {"id": "session end"}], "breakdown": "location", "formula": "A + B"} + { + "events": [{"id": "session start"}, {"id": "session end"}], + "breakdown": "location", + "formula": "A + B", + } ) self.assertEqual(response[0]["label"], "London") @@ -522,7 +630,11 @@ def test_event_properties(self): "math_property": "session duration", "properties": [{"key": "$current_url", "value": "http://example.org"}], }, - {"id": "session start", "math": "avg", "math_property": "session duration"}, + { + "id": "session start", + "math": "avg", + "math_property": "session duration", + }, ] } )[0]["data"], @@ -541,7 +653,8 @@ def test_aggregated(self): def test_cumulative(self): self.assertEqual( - self._run({"display": TRENDS_CUMULATIVE})[0]["data"], [0.0, 0.0, 0.0, 0.0, 0.0, 1200.0, 2550.0, 2550.0] + self._run({"display": TRENDS_CUMULATIVE})[0]["data"], + [0.0, 0.0, 0.0, 0.0, 0.0, 1200.0, 2550.0, 2550.0], ) def test_multiple_events(self): @@ -550,9 +663,21 @@ def test_multiple_events(self): self._run( { "events": [ - {"id": "session start", "math": "sum", "math_property": "session duration"}, - {"id": "session start", "math": "avg", "math_property": "session duration"}, - {"id": "session start", "math": "avg", "math_property": "session duration"}, + { + "id": "session start", + "math": "sum", + "math_property": "session duration", + }, + { + "id": "session start", + "math": "avg", + "math_property": "session duration", + }, + { + "id": "session start", + "math": "avg", + "math_property": "session duration", + }, ] } )[0]["data"], @@ -573,13 +698,20 @@ def test_session_formulas(self): ) def test_group_formulas(self): - self.assertEqual( self._run( { "events": [ - {"id": "session start", "math": "unique_group", "math_group_type_index": 0}, - {"id": "session start", "math": "unique_group", "math_group_type_index": 0}, + { + "id": "session start", + "math": "unique_group", + "math_group_type_index": 0, + }, + { + "id": "session start", + "math": "unique_group", + 
"math_group_type_index": 0, + }, ] } )[0]["data"], diff --git a/posthog/queries/trends/test/test_paging_breakdowns.py b/posthog/queries/trends/test/test_paging_breakdowns.py index 573d036e4b82e..e15175cd92d76 100644 --- a/posthog/queries/trends/test/test_paging_breakdowns.py +++ b/posthog/queries/trends/test/test_paging_breakdowns.py @@ -41,7 +41,17 @@ def _run(self, extra: Dict = {}, run_at: Optional[str] = None): with freeze_time(run_at or "2020-01-04T13:01:01Z"): action_response = Trends().run( Filter( - data={"events": [{"id": "$pageview", "name": "$pageview", "type": "events", "order": 0}], **extra} + data={ + "events": [ + { + "id": "$pageview", + "name": "$pageview", + "type": "events", + "order": 0, + } + ], + **extra, + } ), self.team, ) diff --git a/posthog/queries/trends/test/test_person.py b/posthog/queries/trends/test/test_person.py index bfd18b6ed8de8..1cd04cfd3d206 100644 --- a/posthog/queries/trends/test/test_person.py +++ b/posthog/queries/trends/test/test_person.py @@ -12,7 +12,9 @@ from posthog.models.group.util import create_group from posthog.models.group_type_mapping import GroupTypeMapping from posthog.queries.trends.trends_actors import TrendsActors -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -103,7 +105,11 @@ def test_person_query_does_not_include_recording_events_if_flag_not_set(self): event = {"id": "pageview", "name": "pageview", "type": "events", "order": 0} filter = Filter( - data={"date_from": "2021-01-21T00:00:00Z", "date_to": "2021-01-21T23:59:59Z", "events": [event]} + data={ + "date_from": "2021-01-21T00:00:00Z", + "date_to": "2021-01-21T23:59:59Z", + "events": [event], + } ) entity = Entity(event) _, serialized_actors, _ = TrendsActors(self.team, entity, filter).get_actors() @@ -125,7 +131,11 @@ def 
test_group_query_includes_recording_events(self): ) _create_event( - event="pageview", distinct_id="u1", team=self.team, timestamp=timezone.now(), properties={"$group_0": "bla"} + event="pageview", + distinct_id="u1", + team=self.team, + timestamp=timezone.now(), + properties={"$group_0": "bla"}, ) _create_event( event="pageview", @@ -204,7 +214,10 @@ def test_weekly_active_users(self): data = response.json() self.assertEqual(data.get("results")[0].get("count"), 2) - self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_17", "u_16"]) + self.assertEqual( + [item["name"] for item in data.get("results")[0].get("people")], + ["u_17", "u_16"], + ) def test_weekly_active_users_grouped_by_week(self): for d in range(10, 18): # create a person and event for each day 10. Sep - 17. Sep @@ -271,7 +284,10 @@ def test_weekly_active_users_cumulative(self): data = response.json() self.assertEqual(data.get("results")[0].get("count"), 2) - self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["u_11", "u_10"]) + self.assertEqual( + [item["name"] for item in data.get("results")[0].get("people")], + ["u_11", "u_10"], + ) @skip("see PR 17356") def test_weekly_active_users_breakdown(self): @@ -313,4 +329,7 @@ def test_weekly_active_users_breakdown(self): data = response.json() # self.assertEqual(data.get("results")[0].get("count"), 2) - self.assertEqual([item["name"] for item in data.get("results")[0].get("people")], ["a_17", "a_16"]) + self.assertEqual( + [item["name"] for item in data.get("results")[0].get("people")], + ["a_17", "a_16"], + ) diff --git a/posthog/queries/trends/total_volume.py b/posthog/queries/trends/total_volume.py index 154e105e77f92..31f5d83b4c15c 100644 --- a/posthog/queries/trends/total_volume.py +++ b/posthog/queries/trends/total_volume.py @@ -38,8 +38,16 @@ parse_response, process_math, ) -from posthog.queries.util import TIME_IN_SECONDS, get_interval_func_ch, get_start_of_interval_sql -from 
posthog.utils import PersonOnEventsMode, encode_get_request_params, generate_short_id +from posthog.queries.util import ( + TIME_IN_SECONDS, + get_interval_func_ch, + get_start_of_interval_sql, +) +from posthog.utils import ( + PersonOnEventsMode, + encode_get_request_params, + generate_short_id, +) class TrendsTotalVolume: @@ -115,7 +123,11 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup tag_queries(trend_volume_type="volume_aggregate") content_sql = VOLUME_AGGREGATE_SQL.format(event_query_base=event_query_base, **content_sql_params) - return (content_sql, params, self._parse_aggregate_volume_result(filter, entity, team.id)) + return ( + content_sql, + params, + self._parse_aggregate_volume_result(filter, entity, team.id), + ) else: tag_queries(trend_volume_display="time_series") null_sql = NULL_SQL.format( @@ -133,12 +145,17 @@ def _total_volume_query(self, entity: Entity, filter: Filter, team: Team) -> Tup aggregator=determine_aggregator(entity, team), # TODO: Support groups officialy and with tests date_to_truncated=get_start_of_interval_sql(filter.interval, team=team, source="%(date_to)s"), date_from_active_users_adjusted_truncated=get_start_of_interval_sql( - filter.interval, team=team, source="%(date_from_active_users_adjusted)s" + filter.interval, + team=team, + source="%(date_from_active_users_adjusted)s", ), **content_sql_params, **trend_event_query.active_user_params, ) - elif filter.display == TRENDS_CUMULATIVE and entity.math in (UNIQUE_USERS, UNIQUE_GROUPS): + elif filter.display == TRENDS_CUMULATIVE and entity.math in ( + UNIQUE_USERS, + UNIQUE_GROUPS, + ): # :TODO: Consider using bitmap-per-date to speed this up tag_queries(trend_volume_type="cumulative_actors") cumulative_sql = CUMULATIVE_SQL.format( @@ -272,7 +289,11 @@ def _offset_date_to(self, point_datetime: datetime, filter: Filter, entity: Enti return offset_time_series_date_by_interval(point_datetime, filter=filter, team=team) def _get_persons_url( - self, 
filter: Filter, entity: Entity, team: Team, point_datetimes: List[datetime] + self, + filter: Filter, + entity: Entity, + team: Team, + point_datetimes: List[datetime], ) -> List[Dict[str, Any]]: persons_url = [] cache_invalidation_key = generate_short_id() diff --git a/posthog/queries/trends/trends.py b/posthog/queries/trends/trends.py index 049417799bb8b..479d64f3259b1 100644 --- a/posthog/queries/trends/trends.py +++ b/posthog/queries/trends/trends.py @@ -101,7 +101,12 @@ def adjusted_filter(self, filter: Filter, team: Team) -> Tuple[Filter, Optional[ return new_filter, label_to_payload def merge_results( - self, result, cached_result: Optional[Dict[str, Any]], entity_order: int, filter: Filter, team: Team + self, + result, + cached_result: Optional[Dict[str, Any]], + entity_order: int, + filter: Filter, + team: Team, ): if cached_result and filter.display != TRENDS_CUMULATIVE: new_res = [] @@ -139,7 +144,11 @@ def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[st result = parse_function(result) serialized_data = self._format_serialized(entity, result) merged_results, cached_result = self.merge_results( - serialized_data, cached_result, entity.order or entity.index, filter, team + serialized_data, + cached_result, + entity.order or entity.index, + filter, + team, ) if cached_result: @@ -149,7 +158,15 @@ def _run_query(self, filter: Filter, team: Team, entity: Entity) -> List[Dict[st return merged_results def _run_query_for_threading( - self, result: List, index: int, query_type, sql, params, query_tags: Dict, filter: Filter, team_id: int + self, + result: List, + index: int, + query_type, + sql, + params, + query_tags: Dict, + filter: Filter, + team_id: int, ): tag_queries(**query_tags) with push_scope() as scope: @@ -171,7 +188,16 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: sql_statements_with_params[entity.index] = (sql, query_params) thread = threading.Thread( target=self._run_query_for_threading, 
- args=(result, entity.index, query_type, sql, query_params, get_query_tags(), adjusted_filter, team.pk), + args=( + result, + entity.index, + query_type, + sql, + query_params, + get_query_tags(), + adjusted_filter, + team.pk, + ), ) jobs.append(thread) @@ -189,12 +215,20 @@ def _run_parallel(self, filter: Filter, team: Team) -> List[Dict[str, Any]]: scope.set_tag("team", team) for i, entity in enumerate(filter.entities): scope.set_context( - "query", {"sql": sql_statements_with_params[i][0], "params": sql_statements_with_params[i][1]} + "query", + { + "sql": sql_statements_with_params[i][0], + "params": sql_statements_with_params[i][1], + }, ) serialized_data = cast(List[Callable], parse_functions)[entity.index](result[entity.index]) serialized_data = self._format_serialized(entity, serialized_data) merged_results, cached_result = self.merge_results( - serialized_data, cached_result, entity.order or entity.index, filter, team + serialized_data, + cached_result, + entity.order or entity.index, + filter, + team, ) result[entity.index] = merged_results diff --git a/posthog/queries/trends/trends_actors.py b/posthog/queries/trends/trends_actors.py index ed484968af7dd..228eac4f799e3 100644 --- a/posthog/queries/trends/trends_actors.py +++ b/posthog/queries/trends/trends_actors.py @@ -43,7 +43,8 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: self._filter = self._filter.shallow_clone( { "properties": self._filter.property_groups.combine_properties( - PropertyOperatorType.AND, [Property(key="id", value=cohort.pk, type="cohort")] + PropertyOperatorType.AND, + [Property(key="id", value=cohort.pk, type="cohort")], ).to_dict() } ) @@ -115,7 +116,10 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: else "" ) - actor_value_expression, actor_value_params = self._aggregation_actor_value_expression_with_params + ( + actor_value_expression, + actor_value_params, + ) = 
self._aggregation_actor_value_expression_with_params return ( GET_ACTORS_FROM_EVENT_QUERY.format( @@ -126,7 +130,12 @@ def actor_query(self, limit_actors: Optional[bool] = True) -> Tuple[str, Dict]: limit="LIMIT %(limit)s" if limit_actors else "", offset="OFFSET %(offset)s" if limit_actors else "", ), - {**params, **actor_value_params, "offset": self._filter.offset, "limit": self._filter.limit or 100}, + { + **params, + **actor_value_params, + "offset": self._filter.offset, + "limit": self._filter.limit or 100, + }, ) @cached_property diff --git a/posthog/queries/trends/trends_event_query.py b/posthog/queries/trends/trends_event_query.py index c5504ce388486..6ef5cf009dafc 100644 --- a/posthog/queries/trends/trends_event_query.py +++ b/posthog/queries/trends/trends_event_query.py @@ -24,7 +24,13 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: + " ".join( [ ", " - + get_property_string_expr("events", property, f"'{property}'", "properties", table_alias="e")[0] + + get_property_string_expr( + "events", + property, + f"'{property}'", + "properties", + table_alias="e", + )[0] + f" as {property}" for property in self._extra_event_properties ] diff --git a/posthog/queries/trends/trends_event_query_base.py b/posthog/queries/trends/trends_event_query_base.py index 93dd843349046..4eaaa46d75a52 100644 --- a/posthog/queries/trends/trends_event_query_base.py +++ b/posthog/queries/trends/trends_event_query_base.py @@ -1,6 +1,11 @@ from typing import Any, Dict, Tuple -from posthog.constants import MONTHLY_ACTIVE, UNIQUE_USERS, WEEKLY_ACTIVE, PropertyOperatorType +from posthog.constants import ( + MONTHLY_ACTIVE, + UNIQUE_USERS, + WEEKLY_ACTIVE, + PropertyOperatorType, +) from posthog.models import Entity from posthog.models.entity.util import get_entity_filtering_params from posthog.models.filters.filter import Filter @@ -8,7 +13,10 @@ from posthog.queries.event_query import EventQuery from posthog.queries.person_query import PersonQuery from 
posthog.queries.query_date_range import QueryDateRange -from posthog.queries.trends.util import COUNT_PER_ACTOR_MATH_FUNCTIONS, get_active_user_params +from posthog.queries.trends.util import ( + COUNT_PER_ACTOR_MATH_FUNCTIONS, + get_active_user_params, +) from posthog.queries.util import get_person_properties_mode from posthog.utils import PersonOnEventsMode @@ -117,9 +125,10 @@ def _get_date_filter(self) -> Tuple[str, Dict]: self.parsed_date_to = parsed_date_to if self._entity.math in [WEEKLY_ACTIVE, MONTHLY_ACTIVE]: - active_user_format_params, active_user_query_params = get_active_user_params( - self._filter, self._entity, self._team_id - ) + ( + active_user_format_params, + active_user_query_params, + ) = get_active_user_params(self._filter, self._entity, self._team_id) self.active_user_params = active_user_format_params date_params.update(active_user_query_params) diff --git a/posthog/queries/trends/util.py b/posthog/queries/trends/util.py index 46cd2a8041f32..382201a9e0203 100644 --- a/posthog/queries/trends/util.py +++ b/posthog/queries/trends/util.py @@ -7,7 +7,13 @@ from dateutil.relativedelta import relativedelta from rest_framework.exceptions import ValidationError -from posthog.constants import MONTHLY_ACTIVE, NON_TIME_SERIES_DISPLAY_TYPES, UNIQUE_GROUPS, UNIQUE_USERS, WEEKLY_ACTIVE +from posthog.constants import ( + MONTHLY_ACTIVE, + NON_TIME_SERIES_DISPLAY_TYPES, + UNIQUE_GROUPS, + UNIQUE_USERS, + WEEKLY_ACTIVE, +) from posthog.hogql.hogql import translate_hogql from posthog.models.entity import Entity from posthog.models.event.sql import EVENT_JOIN_PERSON_SQL @@ -42,7 +48,10 @@ "p99_count_per_actor": "quantile(0.99)", } -ALL_SUPPORTED_MATH_FUNCTIONS = [*list(PROPERTY_MATH_FUNCTIONS.keys()), *list(COUNT_PER_ACTOR_MATH_FUNCTIONS.keys())] +ALL_SUPPORTED_MATH_FUNCTIONS = [ + *list(PROPERTY_MATH_FUNCTIONS.keys()), + *list(COUNT_PER_ACTOR_MATH_FUNCTIONS.keys()), +] def process_math( @@ -72,7 +81,8 @@ def process_math( elif entity.math in 
PROPERTY_MATH_FUNCTIONS: if entity.math_property is None: raise ValidationError( - {"math_property": "This field is required when `math` is set to a function."}, code="required" + {"math_property": "This field is required when `math` is set to a function."}, + code="required", ) if entity.math_property == "$session_duration": aggregate_operation = f"{PROPERTY_MATH_FUNCTIONS[entity.math]}(session_duration)" @@ -90,7 +100,10 @@ def process_math( def parse_response( - stats: Dict, filter: Filter, additional_values: Dict = {}, entity: Optional[Entity] = None + stats: Dict, + filter: Filter, + additional_values: Dict = {}, + entity: Optional[Entity] = None, ) -> Dict[str, Any]: counts = stats[1] labels = [item.strftime("%-d-%b-%Y{}".format(" %H:%M" if filter.interval == "hour" else "")) for item in stats[0]] diff --git a/posthog/queries/util.py b/posthog/queries/util.py index ec218785b1dc9..8fdda3799dbd1 100644 --- a/posthog/queries/util.py +++ b/posthog/queries/util.py @@ -155,7 +155,9 @@ def convert_to_datetime_aware(date_obj): def correct_result_for_sampling( - value: Union[int, float], sampling_factor: Optional[float], entity_math: Optional[str] = None + value: Union[int, float], + sampling_factor: Optional[float], + entity_math: Optional[str] = None, ) -> Union[int, float]: from posthog.queries.trends.util import ALL_SUPPORTED_MATH_FUNCTIONS diff --git a/posthog/rate_limit.py b/posthog/rate_limit.py index 587eb742ee4f1..dbaa478d9f462 100644 --- a/posthog/rate_limit.py +++ b/posthog/rate_limit.py @@ -82,7 +82,6 @@ def safely_get_team_id_from_view(view): return None def allow_request(self, request, view): - if not is_rate_limit_enabled(round(time.time() / 60)): return True @@ -114,7 +113,12 @@ def allow_request(self, request, view): statsd.incr( "rate_limit_exceeded", - tags={"team_id": team_id, "scope": scope, "rate": rate, "path": path}, + tags={ + "team_id": team_id, + "scope": scope, + "rate": rate, + "path": path, + }, ) 
RATE_LIMIT_EXCEEDED_COUNTER.labels(team_id=team_id, scope=scope, path=path).inc() @@ -188,7 +192,6 @@ def safely_get_token_from_request(request: Request) -> Optional[str]: return None def allow_request(self, request, view): - if not is_decide_rate_limit_enabled(): return True diff --git a/posthog/session_recordings/models/session_recording.py b/posthog/session_recordings/models/session_recording.py index b3b09a03d0b74..5ef51b34c2f1b 100644 --- a/posthog/session_recordings/models/session_recording.py +++ b/posthog/session_recordings/models/session_recording.py @@ -12,7 +12,9 @@ RecordingMatchingEvents, RecordingMetadata, ) -from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed +from posthog.session_recordings.models.session_recording_event import ( + SessionRecordingViewed, +) from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents diff --git a/posthog/session_recordings/models/session_recording_playlist.py b/posthog/session_recordings/models/session_recording_playlist.py index a0f11b5718335..9c198dbd83e3c 100644 --- a/posthog/session_recordings/models/session_recording_playlist.py +++ b/posthog/session_recordings/models/session_recording_playlist.py @@ -5,7 +5,6 @@ class SessionRecordingPlaylist(models.Model): - short_id: models.CharField = models.CharField(max_length=12, blank=True, default=generate_short_id) name: models.CharField = models.CharField(max_length=400, null=True, blank=True) derived_name: models.CharField = models.CharField(max_length=400, null=True, blank=True) @@ -18,7 +17,11 @@ class SessionRecordingPlaylist(models.Model): created_by: models.ForeignKey = models.ForeignKey("User", on_delete=models.SET_NULL, null=True, blank=True) last_modified_at: models.DateTimeField = models.DateTimeField(default=timezone.now) last_modified_by: models.ForeignKey = models.ForeignKey( - "User", on_delete=models.SET_NULL, null=True, blank=True, related_name="modified_playlists" + "User", + 
on_delete=models.SET_NULL, + null=True, + blank=True, + related_name="modified_playlists", ) # DEPRECATED diff --git a/posthog/session_recordings/models/session_recording_playlist_item.py b/posthog/session_recordings/models/session_recording_playlist_item.py index f0caf9721aa7b..73d38815fdef8 100644 --- a/posthog/session_recordings/models/session_recording_playlist_item.py +++ b/posthog/session_recordings/models/session_recording_playlist_item.py @@ -14,7 +14,9 @@ class Meta: blank=True, ) playlist: models.ForeignKey = models.ForeignKey( - "SessionRecordingPlaylist", related_name="playlist_items", on_delete=models.CASCADE + "SessionRecordingPlaylist", + related_name="playlist_items", + on_delete=models.CASCADE, ) created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True, blank=True) diff --git a/posthog/session_recordings/models/system_status_queries.py b/posthog/session_recordings/models/system_status_queries.py index f14396bf06270..9728c0695c1fa 100644 --- a/posthog/session_recordings/models/system_status_queries.py +++ b/posthog/session_recordings/models/system_status_queries.py @@ -19,5 +19,7 @@ def get_recording_status_month_to_date() -> RecordingsSystemStatus: """ )[0] return RecordingsSystemStatus( - count=result[0], events=f"{result[1]:,} rrweb events in {result[2]:,} messages", size=result[3] + count=result[0], + events=f"{result[1]:,} rrweb events in {result[2]:,} messages", + size=result[3], ) diff --git a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py index b725c95cb658e..c39eee18bf79c 100644 --- a/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py +++ b/posthog/session_recordings/queries/session_recording_list_from_replay_summary.py @@ -117,7 +117,10 @@ def _get_console_log_clause( console_logs_filter: List[Literal["error", "warn", "log"]] ) -> Tuple[str, Dict[str, Any]]: return ( - (f"AND 
level in %(console_logs_levels)s", {"console_logs_levels": console_logs_filter}) + ( + f"AND level in %(console_logs_levels)s", + {"console_logs_levels": console_logs_filter}, + ) if console_logs_filter else ("", {}) ) @@ -126,7 +129,10 @@ def get_query(self) -> Tuple[str, Dict]: if not self._filter.console_search_query: return "", {} - events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause + ( + events_timestamp_clause, + events_timestamp_params, + ) = self._get_events_timestamp_clause console_log_clause, console_log_params = self._get_console_log_clause(self._filter.console_logs_filter) return self._rawQuery.format( @@ -319,9 +325,10 @@ def build_event_filters(self) -> SummaryEventFiltersSQL: if entity.id and entity.id not in event_names_to_filter: event_names_to_filter.append(entity.id) - this_entity_condition_sql, this_entity_filter_params = self.format_event_filter( - entity, prepend=f"event_matcher_{index}", team_id=self._team_id - ) + ( + this_entity_condition_sql, + this_entity_filter_params, + ) = self.format_event_filter(entity, prepend=f"event_matcher_{index}", team_id=self._team_id) joining = "OR" if index > 0 else "" condition_sql += f"{joining} {this_entity_condition_sql}" # wrap in smooths to constrain the scope of the OR @@ -355,7 +362,10 @@ def _get_groups_query(self) -> Tuple[str, Dict]: from posthog.queries.groups_join_query import GroupsJoinQuery return GroupsJoinQuery( - self._filter, self._team_id, self._column_optimizer, person_on_events_mode=self._person_on_events_mode + self._filter, + self._team_id, + self._column_optimizer, + person_on_events_mode=self._person_on_events_mode, ).get_join_query() # We want to select events beyond the range of the recording to handle the case where @@ -382,13 +392,17 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any] } _, recording_start_time_params = _get_recording_start_time_clause(self._filter) - provided_session_ids_clause, 
provided_session_ids_params = _get_filter_by_provided_session_ids_clause( - recording_filters=self._filter, column_name="$session_id" - ) + ( + provided_session_ids_clause, + provided_session_ids_params, + ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter, column_name="$session_id") event_filters = self.build_event_filters event_filters_params = event_filters.params - events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause + ( + events_timestamp_clause, + events_timestamp_params, + ) = self._get_events_timestamp_clause groups_query, groups_params = self._get_groups_query() @@ -407,9 +421,11 @@ def get_query(self, select_event_ids: bool = False) -> Tuple[str, Dict[str, Any] person_id_joined_alias=f"{self.DISTINCT_ID_TABLE_ALIAS}.person_id", ) - persons_join, persons_select_params, persons_sub_query = self._persons_join_or_subquery( - event_filters, prop_query - ) + ( + persons_join, + persons_select_params, + persons_sub_query, + ) = self._persons_join_or_subquery(event_filters, prop_query) return ( self._raw_events_query.format( @@ -590,9 +606,10 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: } _, recording_start_time_params = _get_recording_start_time_clause(self._filter) - provided_session_ids_clause, provided_session_ids_params = _get_filter_by_provided_session_ids_clause( - recording_filters=self._filter - ) + ( + provided_session_ids_clause, + provided_session_ids_params, + ) = _get_filter_by_provided_session_ids_clause(recording_filters=self._filter) ( log_matching_session_ids_clause, @@ -636,7 +653,8 @@ def get_query(self) -> Tuple[str, Dict[str, Any]]: ) def duration_clause( - self, duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"] + self, + duration_filter_type: Literal["duration", "active_seconds", "inactive_seconds"], ) -> Tuple[str, Dict[str, Any]]: duration_clause = "" duration_params = {} diff --git 
a/posthog/session_recordings/queries/session_recording_properties.py b/posthog/session_recordings/queries/session_recording_properties.py index 22d54e9799b4d..e7c5544f14fe7 100644 --- a/posthog/session_recordings/queries/session_recording_properties.py +++ b/posthog/session_recordings/queries/session_recording_properties.py @@ -73,12 +73,19 @@ def format_session_recording_id_filters(self) -> Tuple[str, Dict]: def get_query(self) -> Tuple[str, Dict[str, Any]]: base_params = {"team_id": self._team_id} - events_timestamp_clause, events_timestamp_params = self._get_events_timestamp_clause() - session_ids_clause, session_ids_params = self.format_session_recording_id_filters() + ( + events_timestamp_clause, + events_timestamp_params, + ) = self._get_events_timestamp_clause() + ( + session_ids_clause, + session_ids_params, + ) = self.format_session_recording_id_filters() return ( self._core_single_pageview_event_query.format( - events_timestamp_clause=events_timestamp_clause, session_ids_clause=session_ids_clause + events_timestamp_clause=events_timestamp_clause, + session_ids_clause=session_ids_clause, ), {**base_params, **events_timestamp_params, **session_ids_params}, ) diff --git a/posthog/session_recordings/queries/session_replay_events.py b/posthog/session_recordings/queries/session_replay_events.py index 02c2a26519c21..0d60559c7a047 100644 --- a/posthog/session_recordings/queries/session_replay_events.py +++ b/posthog/session_recordings/queries/session_replay_events.py @@ -25,12 +25,19 @@ def exists(self, session_id: str, team: Team) -> bool: AND session_id = %(session_id)s AND min_first_timestamp >= now() - INTERVAL %(recording_ttl_days)s DAY """, - {"team_id": team.pk, "session_id": session_id, "recording_ttl_days": ttl_days(team)}, + { + "team_id": team.pk, + "session_id": session_id, + "recording_ttl_days": ttl_days(team), + }, ) return result[0][0] > 0 def get_metadata( - self, session_id: str, team: Team, recording_start_time: Optional[datetime] = None + 
self, + session_id: str, + team: Team, + recording_start_time: Optional[datetime] = None, ) -> Optional[RecordingMetadata]: query = """ SELECT @@ -63,7 +70,11 @@ def get_metadata( replay_response: List[Tuple] = sync_execute( query, - {"team_id": team.pk, "session_id": session_id, "recording_start_time": recording_start_time}, + { + "team_id": team.pk, + "session_id": session_id, + "recording_start_time": recording_start_time, + }, ) if len(replay_response) == 0: diff --git a/posthog/session_recordings/queries/test/session_replay_sql.py b/posthog/session_recordings/queries/test/session_replay_sql.py index fcc3eee03a44e..3b094a5a75c8c 100644 --- a/posthog/session_recordings/queries/test/session_replay_sql.py +++ b/posthog/session_recordings/queries/test/session_replay_sql.py @@ -7,7 +7,10 @@ from posthog.clickhouse.log_entries import INSERT_LOG_ENTRY_SQL from posthog.kafka_client.client import ClickhouseProducer -from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, KAFKA_LOG_ENTRIES +from posthog.kafka_client.topics import ( + KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, + KAFKA_LOG_ENTRIES, +) from posthog.models.event.util import format_clickhouse_timestamp from posthog.utils import cast_timestamp_or_now @@ -134,7 +137,11 @@ def produce_replay_summary( } p = ClickhouseProducer() # because this is in a test it will write directly using SQL not really with Kafka - p.produce(topic=KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, sql=INSERT_SINGLE_SESSION_REPLAY, data=data) + p.produce( + topic=KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS, + sql=INSERT_SINGLE_SESSION_REPLAY, + data=data, + ) for level, messages in log_messages.items(): for message in messages: diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py index 9424a9df2a51c..f70f86fdba3cf 100644 --- 
a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py +++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py @@ -14,13 +14,17 @@ from posthog.models.action_step import ActionStep from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.group.util import create_group -from posthog.session_recordings.sql.session_replay_event_sql import TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL +from posthog.session_recordings.sql.session_replay_event_sql import ( + TRUNCATE_SESSION_REPLAY_EVENTS_TABLE_SQL, +) from posthog.models.team import Team from posthog.session_recordings.queries.session_recording_list_from_replay_summary import ( SessionRecordingListFromReplaySummary, ) from posthog.session_recordings.queries.session_replay_events import ttl_days -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -64,7 +68,11 @@ def create_event( if properties is None: properties = {"$os": "Windows 95", "$current_url": "aloha.com/2"} return _create_event( - team=team, event=event_name, timestamp=timestamp, distinct_id=distinct_id, properties=properties + team=team, + event=event_name, + timestamp=timestamp, + distinct_id=distinct_id, + properties=properties, ) @property @@ -123,7 +131,10 @@ def test_basic_query(self): filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None}) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert session_recordings == [ { @@ -225,7 +236,10 @@ def 
test_basic_query_active_sessions( }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert sorted( [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings], @@ -243,7 +257,10 @@ def test_basic_query_active_sessions( }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ (session_id_active_is_61, 59, 61.0) @@ -257,7 +274,10 @@ def test_basic_query_active_sessions( }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert [(s["session_id"], s["duration"], s["inactive_seconds"]) for s in session_recordings] == [ (session_id_inactive_is_61, 61, 61.0) @@ -315,7 +335,10 @@ def test_basic_query_with_paging(self): filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 0}) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert session_recordings == [ { @@ -341,7 +364,10 @@ def test_basic_query_with_paging(self): filter = 
SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 1}) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert session_recordings == [ { @@ -367,7 +393,10 @@ def test_basic_query_with_paging(self): filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None, "limit": 1, "offset": 2}) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert session_recordings == [] @@ -479,7 +508,10 @@ def test_first_url_selection(self): filter = SessionRecordingsFilter(team=self.team, data={"no_filter": None}) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert sorted( [{"session_id": r["session_id"], "first_url": r["first_url"]} for r in session_recordings], @@ -561,7 +593,11 @@ def test_event_filter(self): first_timestamp=self.base_time, team_id=self.team.id, ) - self.create_event(user, self.base_time, properties={"$session_id": session_id_one, "$window_id": str(uuid4())}) + self.create_event( + user, + self.base_time, + properties={"$session_id": session_id_one, "$window_id": str(uuid4())}, + ) produce_replay_summary( distinct_id=user, session_id=session_id_one, @@ -571,7 +607,16 @@ def test_event_filter(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$pageview", "type": 
"events", "order": 0, "name": "$pageview"}]}, + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -580,7 +625,16 @@ def test_event_filter(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$autocapture", "type": "events", "order": 0, "name": "$autocapture"}]}, + data={ + "events": [ + { + "id": "$autocapture", + "type": "events", + "order": 0, + "name": "$autocapture", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -609,7 +663,16 @@ def test_event_filter_has_ttl_applied_too(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]}, + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -654,7 +717,12 @@ def test_event_filter_with_active_sessions( session_id_active_is_61 = f"test_basic_query_active_sessions-active-{str(uuid4())}" self.create_event( - user, self.base_time, properties={"$session_id": session_id_total_is_61, "$window_id": str(uuid4())} + user, + self.base_time, + properties={ + "$session_id": session_id_total_is_61, + "$window_id": str(uuid4()), + }, ) produce_replay_summary( session_id=session_id_total_is_61, @@ -671,7 +739,12 @@ def test_event_filter_with_active_sessions( ) self.create_event( - user, self.base_time, properties={"$session_id": session_id_active_is_61, "$window_id": str(uuid4())} + user, + self.base_time, + 
properties={ + "$session_id": session_id_active_is_61, + "$window_id": str(uuid4()), + }, ) produce_replay_summary( session_id=session_id_active_is_61, @@ -691,12 +764,22 @@ def test_event_filter_with_active_sessions( team=self.team, data={ "duration_type_filter": "duration", - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ (session_id_total_is_61, 61, 59.0) @@ -706,12 +789,22 @@ def test_event_filter_with_active_sessions( team=self.team, data={ "duration_type_filter": "active_seconds", - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) - (session_recordings, more_recordings_available) = session_recording_list_instance.run() + ( + session_recordings, + more_recordings_available, + ) = session_recording_list_instance.run() assert [(s["session_id"], s["duration"], s["active_seconds"]) for s in session_recordings] == [ (session_id_active_is_61, 59, 61.0) @@ -732,7 +825,11 @@ def test_event_filter_with_properties(self): self.create_event( user, self.base_time, - properties={"$browser": "Chrome", 
"$session_id": session_id_one, "$window_id": str(uuid4())}, + properties={ + "$browser": "Chrome", + "$session_id": session_id_one, + "$window_id": str(uuid4()), + }, ) produce_replay_summary( distinct_id=user, @@ -749,7 +846,14 @@ def test_event_filter_with_properties(self): "type": "events", "order": 0, "name": "$pageview", - "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], } ] }, @@ -768,7 +872,14 @@ def test_event_filter_with_properties(self): "type": "events", "order": 0, "name": "$pageview", - "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + "operator": "exact", + "type": "event", + } + ], } ] }, @@ -783,12 +894,22 @@ def test_multiple_event_filters(self): user = "test_multiple_event_filters-user" Person.objects.create(team=self.team, distinct_ids=[user], properties={"email": "bla"}) produce_replay_summary( - distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, ) - self.create_event(user, self.base_time, properties={"$session_id": session_id, "$window_id": "1"}) self.create_event( - user, self.base_time, properties={"$session_id": session_id, "$window_id": "1"}, event_name="new-event" + user, + self.base_time, + properties={"$session_id": session_id, "$window_id": "1"}, + ) + self.create_event( + user, + self.base_time, + properties={"$session_id": session_id, "$window_id": "1"}, + event_name="new-event", ) produce_replay_summary( distinct_id=user, @@ -801,8 +922,18 @@ def test_multiple_event_filters(self): team=self.team, data={ "events": [ - {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}, - {"id": "new-event", 
"type": "events", "order": 0, "name": "new-event"}, + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "new-event", + "type": "events", + "order": 0, + "name": "new-event", + }, ] }, ) @@ -817,8 +948,18 @@ def test_multiple_event_filters(self): team=self.team, data={ "events": [ - {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}, - {"id": "new-event2", "type": "events", "order": 0, "name": "new-event2"}, + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "new-event2", + "type": "events", + "order": 0, + "name": "new-event2", + }, ] }, ) @@ -844,7 +985,10 @@ def test_action_filter(self): ) action_without_properties = self.create_action( name="custom-event", - properties=[{"key": "$session_id", "value": session_id_one}, {"key": "$window_id", "value": window_id}], + properties=[ + {"key": "$session_id", "value": session_id_one}, + {"key": "$window_id", "value": window_id}, + ], ) produce_replay_summary( @@ -857,7 +1001,11 @@ def test_action_filter(self): user, self.base_time, event_name="custom-event", - properties={"$browser": "Chrome", "$session_id": session_id_one, "$window_id": window_id}, + properties={ + "$browser": "Chrome", + "$session_id": session_id_one, + "$window_id": window_id, + }, ) produce_replay_summary( distinct_id=user, @@ -869,7 +1017,14 @@ def test_action_filter(self): filter = SessionRecordingsFilter( team=self.team, data={ - "actions": [{"id": action_with_properties.id, "type": "actions", "order": 1, "name": "custom-event"}] + "actions": [ + { + "id": action_with_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ] }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -879,7 +1034,14 @@ def test_action_filter(self): filter = SessionRecordingsFilter( team=self.team, data={ - "actions": [{"id": action_without_properties.id, "type": "actions", 
"order": 1, "name": "custom-event"}] + "actions": [ + { + "id": action_without_properties.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ] }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -898,7 +1060,14 @@ def test_action_filter(self): "type": "actions", "order": 1, "name": "custom-event", - "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + "operator": "exact", + "type": "event", + } + ], } ] }, @@ -917,7 +1086,14 @@ def test_action_filter(self): "type": "actions", "order": 1, "name": "custom-event", - "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], } ] }, @@ -941,7 +1117,11 @@ def test_all_sessions_recording_object_keys_with_entity_filter(self): last_timestamp=(self.base_time + relativedelta(seconds=60)), team_id=self.team.id, ) - self.create_event(user, self.base_time, properties={"$session_id": session_id, "$window_id": window_id}) + self.create_event( + user, + self.base_time, + properties={"$session_id": session_id, "$window_id": window_id}, + ) produce_replay_summary( distinct_id=user, session_id=session_id, @@ -951,7 +1131,16 @@ def test_all_sessions_recording_object_keys_with_entity_filter(self): ) filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]}, + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1056,7 +1245,8 @@ def test_date_from_filter(self): assert 
session_recordings == [] filter = SessionRecordingsFilter( - team=self.team, data={"date_from": (self.base_time - relativedelta(days=2)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_from": (self.base_time - relativedelta(days=2)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1086,7 +1276,8 @@ def test_date_from_filter_cannot_search_before_ttl(self): ) filter = SessionRecordingsFilter( - team=self.team, data={"date_from": (self.base_time - relativedelta(days=20)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_from": (self.base_time - relativedelta(days=20)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1094,7 +1285,8 @@ def test_date_from_filter_cannot_search_before_ttl(self): assert session_recordings[0]["session_id"] == "storage is not past ttl" filter = SessionRecordingsFilter( - team=self.team, data={"date_from": (self.base_time - relativedelta(days=21)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_from": (self.base_time - relativedelta(days=21)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1102,7 +1294,8 @@ def test_date_from_filter_cannot_search_before_ttl(self): assert session_recordings[0]["session_id"] == "storage is not past ttl" filter = SessionRecordingsFilter( - team=self.team, data={"date_from": (self.base_time - relativedelta(days=22)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_from": (self.base_time - relativedelta(days=22)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, 
_) = session_recording_list_instance.run() @@ -1129,14 +1322,16 @@ def test_date_to_filter(self): ) filter = SessionRecordingsFilter( - team=self.team, data={"date_to": (self.base_time - relativedelta(days=4)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_to": (self.base_time - relativedelta(days=4)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() assert session_recordings == [] filter = SessionRecordingsFilter( - team=self.team, data={"date_to": (self.base_time - relativedelta(days=3)).strftime("%Y-%m-%d")} + team=self.team, + data={"date_to": (self.base_time - relativedelta(days=3)).strftime("%Y-%m-%d")}, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1176,9 +1371,15 @@ def test_person_id_filter(self): session_id_one = f"test_person_id_filter-{str(uuid4())}" session_id_two = f"test_person_id_filter-{str(uuid4())}" p = Person.objects.create( - team=self.team, distinct_ids=[three_user_ids[0], three_user_ids[1]], properties={"email": "bla"} + team=self.team, + distinct_ids=[three_user_ids[0], three_user_ids[1]], + properties={"email": "bla"}, + ) + produce_replay_summary( + distinct_id=three_user_ids[0], + session_id=session_id_one, + team_id=self.team.id, ) - produce_replay_summary(distinct_id=three_user_ids[0], session_id=session_id_one, team_id=self.team.id) produce_replay_summary( distinct_id=three_user_ids[1], session_id=session_id_two, @@ -1201,7 +1402,9 @@ def test_all_filters_at_once(self): target_session_id = f"test_all_filters_at_once-{str(uuid4())}" p = Person.objects.create( - team=self.team, distinct_ids=[three_user_ids[0], three_user_ids[1]], properties={"email": "bla"} + team=self.team, + distinct_ids=[three_user_ids[0], three_user_ids[1]], + properties={"email": "bla"}, ) 
custom_event_action = self.create_action(name="custom-event") @@ -1219,7 +1422,9 @@ def test_all_filters_at_once(self): team_id=self.team.id, ) self.create_event( - three_user_ids[0], self.base_time - relativedelta(days=3), properties={"$session_id": target_session_id} + three_user_ids[0], + self.base_time - relativedelta(days=3), + properties={"$session_id": target_session_id}, ) self.create_event( three_user_ids[0], @@ -1250,8 +1455,22 @@ def test_all_filters_at_once(self): "date_to": (self.base_time + relativedelta(days=3)).strftime("%Y-%m-%d"), "date_from": (self.base_time - relativedelta(days=10)).strftime("%Y-%m-%d"), "session_recording_duration": '{"type":"recording","key":"duration","value":60,"operator":"gt"}', - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], - "actions": [{"id": custom_event_action.id, "type": "actions", "order": 1, "name": "custom-event"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], + "actions": [ + { + "id": custom_event_action.id, + "type": "actions", + "order": 1, + "name": "custom-event", + } + ], }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -1264,7 +1483,10 @@ def test_teams_dont_leak_event_filter(self): session_id = f"test_teams_dont_leak_event_filter-{str(uuid4())}" produce_replay_summary( - distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, ) self.create_event(1, self.base_time + relativedelta(seconds=15), team=another_team) produce_replay_summary( @@ -1276,7 +1498,16 @@ def test_teams_dont_leak_event_filter(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]}, + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + 
"order": 0, + "name": "$pageview", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -1295,7 +1526,10 @@ def test_event_filter_with_person_properties(self): Person.objects.create(team=self.team, distinct_ids=[user_two], properties={"email": "bla2"}) produce_replay_summary( - distinct_id=user_one, session_id=session_id_one, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user_one, + session_id=session_id_one, + first_timestamp=self.base_time, + team_id=self.team.id, ) produce_replay_summary( distinct_id=user_one, @@ -1304,7 +1538,10 @@ def test_event_filter_with_person_properties(self): team_id=self.team.id, ) produce_replay_summary( - distinct_id=user_two, session_id=session_id_two, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user_two, + session_id=session_id_two, + first_timestamp=self.base_time, + team_id=self.team.id, ) produce_replay_summary( distinct_id=user_two, @@ -1315,7 +1552,16 @@ def test_event_filter_with_person_properties(self): filter = SessionRecordingsFilter( team=self.team, - data={"properties": [{"key": "email", "value": ["bla"], "operator": "exact", "type": "person"}]}, + data={ + "properties": [ + { + "key": "email", + "value": ["bla"], + "operator": "exact", + "type": "person", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -1336,12 +1582,24 @@ def test_filter_with_cohort_properties(self): Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) Person.objects.create( - team=self.team, distinct_ids=[user_two], properties={"email": "bla2", "$some_prop": "some_val"} + team=self.team, + distinct_ids=[user_two], + properties={"email": "bla2", "$some_prop": "some_val"}, ) cohort = Cohort.objects.create( team=self.team, name="cohort1", - groups=[{"properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}]}], + 
groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], ) cohort.calculate_people_ch(pending_version=0) @@ -1373,7 +1631,16 @@ def test_filter_with_cohort_properties(self): ) filter = SessionRecordingsFilter( team=self.team, - data={"properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}]}, + data={ + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": None, + "type": "cohort", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1393,12 +1660,24 @@ def test_filter_with_events_and_cohorts(self): Person.objects.create(team=self.team, distinct_ids=[user_one], properties={"email": "bla"}) Person.objects.create( - team=self.team, distinct_ids=[user_two], properties={"email": "bla2", "$some_prop": "some_val"} + team=self.team, + distinct_ids=[user_two], + properties={"email": "bla2", "$some_prop": "some_val"}, ) cohort = Cohort.objects.create( team=self.team, name="cohort1", - groups=[{"properties": [{"key": "$some_prop", "value": "some_val", "type": "person"}]}], + groups=[ + { + "properties": [ + { + "key": "$some_prop", + "value": "some_val", + "type": "person", + } + ] + } + ], ) cohort.calculate_people_ch(pending_version=0) @@ -1446,8 +1725,22 @@ def test_filter_with_events_and_cohorts(self): data={ # has to be in the cohort and pageview has to be in the events # test data has one user in the cohort but no pageviews - "properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}], - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": None, + "type": "cohort", + } + ], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], }, ) 
session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -1458,8 +1751,22 @@ def test_filter_with_events_and_cohorts(self): filter = SessionRecordingsFilter( team=self.team, data={ - "properties": [{"key": "id", "value": cohort.pk, "operator": None, "type": "cohort"}], - "events": [{"id": "custom_event", "type": "events", "order": 0, "name": "custom_event"}], + "properties": [ + { + "key": "id", + "value": cohort.pk, + "operator": None, + "type": "cohort", + } + ], + "events": [ + { + "id": "custom_event", + "type": "events", + "order": 0, + "name": "custom_event", + } + ], }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -1476,14 +1783,23 @@ def test_event_filter_with_matching_on_session_id(self): session_id = f"test_event_filter_with_matching_on_session_id-1-{str(uuid4())}" self.create_event( - user_distinct_id, self.base_time, event_name="$pageview", properties={"$session_id": session_id} + user_distinct_id, + self.base_time, + event_name="$pageview", + properties={"$session_id": session_id}, ) self.create_event( - user_distinct_id, self.base_time, event_name="$autocapture", properties={"$session_id": str(uuid4())} + user_distinct_id, + self.base_time, + event_name="$autocapture", + properties={"$session_id": str(uuid4())}, ) produce_replay_summary( - distinct_id=user_distinct_id, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user_distinct_id, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, ) produce_replay_summary( distinct_id=user_distinct_id, @@ -1494,7 +1810,16 @@ def test_event_filter_with_matching_on_session_id(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}]}, + data={ + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ] 
+ }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1504,7 +1829,16 @@ def test_event_filter_with_matching_on_session_id(self): filter = SessionRecordingsFilter( team=self.team, - data={"events": [{"id": "$autocapture", "type": "events", "order": 0, "name": "$autocapture"}]}, + data={ + "events": [ + { + "id": "$autocapture", + "type": "events", + "order": 0, + "name": "$autocapture", + } + ] + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) (session_recordings, _) = session_recording_list_instance.run() @@ -1521,11 +1855,18 @@ def test_event_filter_with_hogql_properties(self): self.create_event( user, self.base_time, - properties={"$browser": "Chrome", "$session_id": session_id, "$window_id": str(uuid4())}, + properties={ + "$browser": "Chrome", + "$session_id": session_id, + "$window_id": str(uuid4()), + }, ) produce_replay_summary( - distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, ) produce_replay_summary( distinct_id=user, @@ -1586,11 +1927,18 @@ def test_event_filter_with_hogql_person_properties(self): self.create_event( user, self.base_time, - properties={"$browser": "Chrome", "$session_id": session_id, "$window_id": str(uuid4())}, + properties={ + "$browser": "Chrome", + "$session_id": session_id, + "$window_id": str(uuid4()), + }, ) produce_replay_summary( - distinct_id=user, session_id=session_id, first_timestamp=self.base_time, team_id=self.team.id + distinct_id=user, + session_id=session_id, + first_timestamp=self.base_time, + team_id=self.team.id, ) produce_replay_summary( distinct_id=user, @@ -1609,7 +1957,10 @@ def test_event_filter_with_hogql_person_properties(self): "order": 0, "name": "$pageview", "properties": [ - 
{"key": "person.properties.email == 'bla'", "type": "hogql"}, + { + "key": "person.properties.email == 'bla'", + "type": "hogql", + }, ], } ] @@ -1631,7 +1982,10 @@ def test_event_filter_with_hogql_person_properties(self): "order": 0, "name": "$pageview", "properties": [ - {"key": "person.properties.email == 'something else'", "type": "hogql"}, + { + "key": "person.properties.email == 'something else'", + "type": "hogql", + }, ], } ] @@ -1656,21 +2010,33 @@ def test_any_event_filter_with_properties(self): self.create_event( "user", self.base_time, - properties={"$browser": "Chrome", "$session_id": page_view_session_id, "$window_id": "1"}, + properties={ + "$browser": "Chrome", + "$session_id": page_view_session_id, + "$window_id": "1", + }, event_name="$pageview", ) self.create_event( "user", self.base_time, - properties={"$browser": "Chrome", "$session_id": my_custom_event_session_id, "$window_id": "1"}, + properties={ + "$browser": "Chrome", + "$session_id": my_custom_event_session_id, + "$window_id": "1", + }, event_name="my-custom-event", ) self.create_event( "user", self.base_time, - properties={"$browser": "Safari", "$session_id": non_matching__event_session_id, "$window_id": "1"}, + properties={ + "$browser": "Safari", + "$session_id": non_matching__event_session_id, + "$window_id": "1", + }, event_name="my-non-matching-event", ) @@ -1727,7 +2093,14 @@ def test_any_event_filter_with_properties(self): "type": "events", "order": 0, "name": "All events", - "properties": [{"key": "$browser", "value": ["Chrome"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Chrome"], + "operator": "exact", + "type": "event", + } + ], } ] }, @@ -1749,7 +2122,14 @@ def test_any_event_filter_with_properties(self): "type": "events", "order": 0, "name": "All events", - "properties": [{"key": "$browser", "value": ["Firefox"], "operator": "exact", "type": "event"}], + "properties": [ + { + "key": "$browser", + "value": ["Firefox"], + 
"operator": "exact", + "type": "event", + } + ], } ] }, @@ -1990,7 +2370,14 @@ def test_filter_for_recordings_by_console_text(self): first_timestamp=self.base_time, team_id=self.team.id, console_log_count=4, - log_messages={"log": ["log message 1", "log message 2", "log message 3", "log message 4"]}, + log_messages={ + "log": [ + "log message 1", + "log message 2", + "log message 3", + "log message 4", + ] + }, ) produce_replay_summary( distinct_id="user", @@ -1999,7 +2386,13 @@ def test_filter_for_recordings_by_console_text(self): team_id=self.team.id, console_warn_count=5, log_messages={ - "warn": ["warn message 1", "warn message 2", "warn message 3", "warn message 4", "warn message 5"] + "warn": [ + "warn message 1", + "warn message 2", + "warn message 3", + "warn message 4", + "warn message 5", + ] }, ) produce_replay_summary( @@ -2008,7 +2401,14 @@ def test_filter_for_recordings_by_console_text(self): first_timestamp=self.base_time, team_id=self.team.id, console_error_count=4, - log_messages={"error": ["error message 1", "error message 2", "error message 3", "error message 4"]}, + log_messages={ + "error": [ + "error message 1", + "error message 2", + "error message 3", + "error message 4", + ] + }, ) produce_replay_summary( distinct_id="user", @@ -2018,7 +2418,12 @@ def test_filter_for_recordings_by_console_text(self): console_error_count=4, console_log_count=3, log_messages={ - "error": ["error message 1", "error message 2", "error message 3", "error message 4"], + "error": [ + "error message 1", + "error message 2", + "error message 3", + "error message 4", + ], "log": ["log message 1", "log message 2", "log message 3"], }, ) @@ -2026,7 +2431,10 @@ def test_filter_for_recordings_by_console_text(self): filter = SessionRecordingsFilter( team=self.team, # there are 5 warn and 4 error logs, message 4 matches in both - data={"console_logs": ["warn", "error"], "console_search_query": "message 4"}, + data={ + "console_logs": ["warn", "error"], + 
"console_search_query": "message 4", + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -2043,7 +2451,10 @@ def test_filter_for_recordings_by_console_text(self): filter = SessionRecordingsFilter( team=self.team, # there are 5 warn and 4 error logs, message 5 matches only matches in warn - data={"console_logs": ["warn", "error"], "console_search_query": "message 5"}, + data={ + "console_logs": ["warn", "error"], + "console_search_query": "message 5", + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -2058,7 +2469,10 @@ def test_filter_for_recordings_by_console_text(self): filter = SessionRecordingsFilter( team=self.team, # match is case-insensitive - data={"console_logs": ["warn", "error"], "console_search_query": "MESSAGE 5"}, + data={ + "console_logs": ["warn", "error"], + "console_search_query": "MESSAGE 5", + }, ) session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team) @@ -2083,14 +2497,26 @@ def test_filter_for_recordings_by_console_text(self): assert sorted([sr["session_id"] for sr in session_recordings]) == sorted([]) @also_test_with_materialized_columns( - event_properties=["is_internal_user"], person_properties=["email"], verify_no_jsonextract=False + event_properties=["is_internal_user"], + person_properties=["email"], + verify_no_jsonextract=False, ) @freeze_time("2021-01-21T20:00:00.000Z") @snapshot_clickhouse_queries def test_event_filter_with_test_accounts_excluded(self): self.team.test_account_filters = [ - {"key": "email", "value": "@posthog.com", "operator": "not_icontains", "type": "person"}, - {"key": "is_internal_user", "value": ["false"], "operator": "exact", "type": "event"}, + { + "key": "email", + "value": "@posthog.com", + "operator": "not_icontains", + "type": "person", + }, + { + "key": "is_internal_user", + "value": ["false"], + "operator": "exact", + "type": "event", + 
}, {"key": "properties.$browser == 'Chrome'", "type": "hogql"}, ] self.team.save() @@ -2106,7 +2532,11 @@ def test_event_filter_with_test_accounts_excluded(self): self.create_event( "user", self.base_time, - properties={"$session_id": "1", "$window_id": "1", "is_internal_user": "true"}, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": "true", + }, ) produce_replay_summary( distinct_id="user", @@ -2118,7 +2548,14 @@ def test_event_filter_with_test_accounts_excluded(self): filter = SessionRecordingsFilter( team=self.team, data={ - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": True, }, ) @@ -2129,7 +2566,14 @@ def test_event_filter_with_test_accounts_excluded(self): filter = SessionRecordingsFilter( team=self.team, data={ - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2138,7 +2582,9 @@ def test_event_filter_with_test_accounts_excluded(self): self.assertEqual(len(session_recordings), 1) @also_test_with_materialized_columns( - event_properties=["$browser"], person_properties=["email"], verify_no_jsonextract=False + event_properties=["$browser"], + person_properties=["email"], + verify_no_jsonextract=False, ) @freeze_time("2021-01-21T20:00:00.000Z") @snapshot_clickhouse_queries @@ -2149,7 +2595,11 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self): self.team.save() Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": 
"not-the-other-one"}, + ) produce_replay_summary( distinct_id="user", @@ -2186,7 +2636,14 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self): team=self.team, data={ # pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2203,7 +2660,14 @@ def test_event_filter_with_hogql_event_properties_test_accounts_excluded(self): team=self.team, data={ # only 1 pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": True, }, ) @@ -2239,12 +2703,21 @@ def test_top_level_event_property_test_account_filter(self): The filter wasn't triggering the "should join events check", and so we didn't apply the filter at all """ self.team.test_account_filters = [ - {"key": "is_internal_user", "value": ["false"], "operator": "exact", "type": "event"}, + { + "key": "is_internal_user", + "value": ["false"], + "operator": "exact", + "type": "event", + }, ] self.team.save() Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) produce_replay_summary( distinct_id="user", @@ -2255,7 +2728,11 @@ def test_top_level_event_property_test_account_filter(self): self.create_event( "user", self.base_time, - properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False}, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, ) 
produce_replay_summary( distinct_id="user", @@ -2273,7 +2750,11 @@ def test_top_level_event_property_test_account_filter(self): self.create_event( "user2", self.base_time, - properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True}, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, ) # there are 2 pageviews @@ -2281,7 +2762,14 @@ def test_top_level_event_property_test_account_filter(self): team=self.team, data={ # pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2316,7 +2804,11 @@ def test_top_level_hogql_event_property_test_account_filter(self): self.team.save() Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) produce_replay_summary( distinct_id="user", @@ -2327,7 +2819,11 @@ def test_top_level_hogql_event_property_test_account_filter(self): self.create_event( "user", self.base_time, - properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False}, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, ) produce_replay_summary( distinct_id="user", @@ -2345,7 +2841,11 @@ def test_top_level_hogql_event_property_test_account_filter(self): self.create_event( "user2", self.base_time, - properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True}, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, ) # there are 2 pageviews @@ -2353,7 +2853,14 @@ def test_top_level_hogql_event_property_test_account_filter(self): 
team=self.team, data={ # pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2388,7 +2895,11 @@ def test_top_level_hogql_person_property_test_account_filter(self): self.team.save() Person.objects.create(team=self.team, distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) produce_replay_summary( distinct_id="user", @@ -2399,7 +2910,11 @@ def test_top_level_hogql_person_property_test_account_filter(self): self.create_event( "user", self.base_time, - properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False}, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, ) produce_replay_summary( distinct_id="user", @@ -2417,7 +2932,11 @@ def test_top_level_hogql_person_property_test_account_filter(self): self.create_event( "user2", self.base_time, - properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True}, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, ) # there are 2 pageviews @@ -2425,7 +2944,14 @@ def test_top_level_hogql_person_property_test_account_filter(self): team=self.team, data={ # pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2458,7 +2984,11 @@ def test_top_level_person_property_test_account_filter(self): self.team.save() Person.objects.create(team=self.team, 
distinct_ids=["user"], properties={"email": "bla"}) - Person.objects.create(team=self.team, distinct_ids=["user2"], properties={"email": "not-the-other-one"}) + Person.objects.create( + team=self.team, + distinct_ids=["user2"], + properties={"email": "not-the-other-one"}, + ) produce_replay_summary( distinct_id="user", @@ -2469,7 +2999,11 @@ def test_top_level_person_property_test_account_filter(self): self.create_event( "user", self.base_time, - properties={"$session_id": "1", "$window_id": "1", "is_internal_user": False}, + properties={ + "$session_id": "1", + "$window_id": "1", + "is_internal_user": False, + }, ) produce_replay_summary( distinct_id="user", @@ -2487,7 +3021,11 @@ def test_top_level_person_property_test_account_filter(self): self.create_event( "user2", self.base_time, - properties={"$session_id": "2", "$window_id": "1", "is_internal_user": True}, + properties={ + "$session_id": "2", + "$window_id": "1", + "is_internal_user": True, + }, ) # there are 2 pageviews @@ -2495,7 +3033,14 @@ def test_top_level_person_property_test_account_filter(self): team=self.team, data={ # pageview that matches the hogql test_accounts filter - "events": [{"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}], + "events": [ + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + } + ], "filter_test_accounts": False, }, ) @@ -2531,8 +3076,18 @@ def test_event_filter_with_two_events_and_multiple_teams(self): team=self.team, data={ "events": [ - {"id": "$pageview", "type": "events", "order": 0, "name": "$pageview"}, - {"id": "$pageleave", "type": "events", "order": 0, "name": "$pageleave"}, + { + "id": "$pageview", + "type": "events", + "order": 0, + "name": "$pageview", + }, + { + "id": "$pageleave", + "type": "events", + "order": 0, + "name": "$pageleave", + }, ], }, ) @@ -2585,11 +3140,19 @@ def test_event_filter_with_group_filter(self): GroupTypeMapping.objects.create(team=self.team, group_type="project", 
group_type_index=0) create_group( - team_id=self.team.pk, group_type_index=0, group_key="project:1", properties={"name": "project one"} + team_id=self.team.pk, + group_type_index=0, + group_key="project:1", + properties={"name": "project one"}, ) GroupTypeMapping.objects.create(team=self.team, group_type="organization", group_type_index=1) - create_group(team_id=self.team.pk, group_type_index=1, group_key="org:1", properties={"name": "org one"}) + create_group( + team_id=self.team.pk, + group_type_index=1, + group_key="org:1", + properties={"name": "org one"}, + ) self.create_event( "user", diff --git a/posthog/session_recordings/queries/test/test_session_recording_properties.py b/posthog/session_recordings/queries/test/test_session_recording_properties.py index 9844d77006721..aa152b0b2fa16 100644 --- a/posthog/session_recordings/queries/test/test_session_recording_properties.py +++ b/posthog/session_recordings/queries/test/test_session_recording_properties.py @@ -4,9 +4,18 @@ from posthog.models import Person from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter -from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary -from posthog.test.base import BaseTest, ClickhouseTestMixin, _create_event, snapshot_clickhouse_queries +from posthog.session_recordings.queries.session_recording_properties import ( + SessionRecordingProperties, +) +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) +from posthog.test.base import ( + BaseTest, + ClickhouseTestMixin, + _create_event, + snapshot_clickhouse_queries, +) class TestSessionRecordingProperties(BaseTest, ClickhouseTestMixin): @@ -20,7 +29,13 @@ def create_event( ): if team is None: team = self.team - _create_event(team=team, event=event_name, timestamp=timestamp, distinct_id=distinct_id, 
properties=properties) + _create_event( + team=team, + event=event_name, + timestamp=timestamp, + distinct_id=distinct_id, + properties=properties, + ) @property def base_time(self): @@ -77,7 +92,10 @@ def test_properties_list(self): self.assertEqual(session_recordings_properties[0]["properties"]["$browser"], "Chrome") self.assertEqual(session_recordings_properties[0]["properties"]["$os"], "Mac OS X") self.assertEqual(session_recordings_properties[0]["properties"]["$device_type"], "Desktop") - self.assertEqual(session_recordings_properties[0]["properties"]["$current_url"], "https://blah.com/blah") + self.assertEqual( + session_recordings_properties[0]["properties"]["$current_url"], + "https://blah.com/blah", + ) self.assertEqual(session_recordings_properties[0]["properties"]["$host"], "blah.com") self.assertEqual(session_recordings_properties[0]["properties"]["$pathname"], "/blah") self.assertEqual(session_recordings_properties[0]["properties"]["$geoip_country_code"], "KR") diff --git a/posthog/session_recordings/queries/test/test_session_replay_events.py b/posthog/session_recordings/queries/test/test_session_replay_events.py index bbdec4ea0cc3e..04393f8500c07 100644 --- a/posthog/session_recordings/queries/test/test_session_replay_events.py +++ b/posthog/session_recordings/queries/test/test_session_replay_events.py @@ -1,6 +1,8 @@ from posthog.models import Team from posthog.session_recordings.queries.session_replay_events import SessionReplayEvents -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ClickhouseTestMixin, APIBaseTest from dateutil.relativedelta import relativedelta from django.utils.timezone import now @@ -63,6 +65,8 @@ def test_get_metadata_does_not_leak_between_teams(self) -> None: def test_get_metadata_filters_by_date(self) -> None: metadata = 
SessionReplayEvents().get_metadata( - session_id="1", team=self.team, recording_start_time=self.base_time + relativedelta(days=2) + session_id="1", + team=self.team, + recording_start_time=self.base_time + relativedelta(days=2), ) assert metadata is None diff --git a/posthog/session_recordings/queries/test/test_session_replay_summaries.py b/posthog/session_recordings/queries/test/test_session_replay_summaries.py index 5a1e9b94db842..6d3376d467ae6 100644 --- a/posthog/session_recordings/queries/test/test_session_replay_summaries.py +++ b/posthog/session_recordings/queries/test/test_session_replay_summaries.py @@ -9,7 +9,9 @@ from posthog.models import Team from posthog.models.event.util import format_clickhouse_timestamp from posthog.queries.app_metrics.serializers import AppMetricsRequestSerializer -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import BaseTest, ClickhouseTestMixin, snapshot_clickhouse_queries diff --git a/posthog/session_recordings/realtime_snapshots.py b/posthog/session_recordings/realtime_snapshots.py index 20e8a0846440c..e1191c4ddb37e 100644 --- a/posthog/session_recordings/realtime_snapshots.py +++ b/posthog/session_recordings/realtime_snapshots.py @@ -40,7 +40,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op # We always publish as it could be that a rebalance has occured and the consumer doesn't know it should be # sending data to redis - redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id})) + redis.publish( + SUBSCRIPTION_CHANNEL, + json.dumps({"team_id": team_id, "session_id": session_id}), + ) if not encoded_snapshots and attempt_count < ATTEMPT_MAX: logger.info( @@ -50,7 +53,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op attempt_count=attempt_count, ) 
# If we don't have it we could be in the process of getting it and syncing it - redis.publish(SUBSCRIPTION_CHANNEL, json.dumps({"team_id": team_id, "session_id": session_id})) + redis.publish( + SUBSCRIPTION_CHANNEL, + json.dumps({"team_id": team_id, "session_id": session_id}), + ) PUBLISHED_REALTIME_SUBSCRIPTIONS_COUNTER.labels( team_id=team_id, session_id=session_id, attempt_count=attempt_count ).inc() @@ -73,7 +79,10 @@ def get_realtime_snapshots(team_id: str, session_id: str, attempt_count=0) -> Op # very broad capture to see if there are any unexpected errors capture_exception( e, - extras={"attempt_count": attempt_count, "operation": "get_realtime_snapshots"}, + extras={ + "attempt_count": attempt_count, + "operation": "get_realtime_snapshots", + }, tags={"team_id": team_id, "session_id": session_id}, ) raise e diff --git a/posthog/session_recordings/session_recording_api.py b/posthog/session_recordings/session_recording_api.py index 827703006340c..6996d2c990460 100644 --- a/posthog/session_recordings/session_recording_api.py +++ b/posthog/session_recordings/session_recording_api.py @@ -29,17 +29,26 @@ SharingTokenPermission, TeamMemberAccessPermission, ) -from posthog.session_recordings.models.session_recording_event import SessionRecordingViewed +from posthog.session_recordings.models.session_recording_event import ( + SessionRecordingViewed, +) from posthog.session_recordings.queries.session_recording_list_from_replay_summary import ( SessionRecordingListFromReplaySummary, SessionIdEventsQuery, ) -from posthog.session_recordings.queries.session_recording_properties import SessionRecordingProperties -from posthog.rate_limit import ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle +from posthog.session_recordings.queries.session_recording_properties import ( + SessionRecordingProperties, +) +from posthog.rate_limit import ( + ClickHouseBurstRateThrottle, + ClickHouseSustainedRateThrottle, +) from 
posthog.session_recordings.queries.session_replay_events import SessionReplayEvents from posthog.session_recordings.realtime_snapshots import get_realtime_snapshots -from posthog.session_recordings.snapshots.convert_legacy_snapshots import convert_original_version_lts_recording +from posthog.session_recordings.snapshots.convert_legacy_snapshots import ( + convert_original_version_lts_recording, +) from posthog.storage import object_storage from prometheus_client import Counter @@ -130,7 +139,11 @@ class SessionRecordingSnapshotsSerializer(serializers.Serializer): class SessionRecordingViewSet(StructuredViewSetMixin, viewsets.GenericViewSet): - permission_classes = [IsAuthenticated, ProjectMembershipNecessaryPermissions, TeamMemberAccessPermission] + permission_classes = [ + IsAuthenticated, + ProjectMembershipNecessaryPermissions, + TeamMemberAccessPermission, + ] throttle_classes = [ClickHouseBurstRateThrottle, ClickHouseSustainedRateThrottle] serializer_class = SessionRecordingSerializer # We don't use this @@ -269,7 +282,9 @@ def snapshots(self, request: request.Request, **kwargs): event_properties["$session_id"] = request.headers["X-POSTHOG-SESSION-ID"] posthoganalytics.capture( - self._distinct_id_from_request(request), "v2 session recording snapshots viewed", event_properties + self._distinct_id_from_request(request), + "v2 session recording snapshots viewed", + event_properties, ) if source: @@ -338,7 +353,9 @@ def snapshots(self, request: request.Request, **kwargs): event_properties["source"] = "realtime" event_properties["snapshots_length"] = len(snapshots) posthoganalytics.capture( - self._distinct_id_from_request(request), "session recording snapshots v2 loaded", event_properties + self._distinct_id_from_request(request), + "session recording snapshots v2 loaded", + event_properties, ) response_data["snapshots"] = snapshots @@ -366,7 +383,9 @@ def snapshots(self, request: request.Request, **kwargs): event_properties["source"] = "blob" 
event_properties["blob_key"] = blob_key posthoganalytics.capture( - self._distinct_id_from_request(request), "session recording snapshots v2 loaded", event_properties + self._distinct_id_from_request(request), + "session recording snapshots v2 loaded", + event_properties, ) with requests.get(url=url, stream=True) as r: @@ -451,9 +470,10 @@ def list_recordings(filter: SessionRecordingsFilter, request: request.Request, c if (all_session_ids and filter.session_ids) or not all_session_ids: # Only go to clickhouse if we still have remaining specified IDs, or we are not specifying IDs - (ch_session_recordings, more_recordings_available) = SessionRecordingListFromReplaySummary( - filter=filter, team=team - ).run() + ( + ch_session_recordings, + more_recordings_available, + ) = SessionRecordingListFromReplaySummary(filter=filter, team=team).run() recordings_from_clickhouse = SessionRecording.get_or_build_from_clickhouse(team, ch_session_recordings) recordings = recordings + recordings_from_clickhouse @@ -462,7 +482,10 @@ def list_recordings(filter: SessionRecordingsFilter, request: request.Request, c # If we have specified session_ids we need to sort them by the order they were specified if all_session_ids: - recordings = sorted(recordings, key=lambda x: cast(List[str], all_session_ids).index(x.session_id)) + recordings = sorted( + recordings, + key=lambda x: cast(List[str], all_session_ids).index(x.session_id), + ) if not request.user.is_authenticated: # for mypy raise exceptions.NotAuthenticated() diff --git a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py index a60e1b74717e0..963016d0e869a 100644 --- a/posthog/session_recordings/snapshots/convert_legacy_snapshots.py +++ b/posthog/session_recordings/snapshots/convert_legacy_snapshots.py @@ -18,12 +18,17 @@ def _save_converted_content_back_to_storage(converted_content: str, recording: SessionRecording) -> str: try: - from 
ee.session_recordings.session_recording_extensions import save_recording_with_new_content + from ee.session_recordings.session_recording_extensions import ( + save_recording_with_new_content, + ) return save_recording_with_new_content(recording, converted_content) except ImportError: # not running in EE context... shouldn't get here - logger.error("attempted_to_save_converted_content_back_to_storage_in_non_ee_context", recording_id=recording.id) + logger.error( + "attempted_to_save_converted_content_back_to_storage_in_non_ee_context", + recording_id=recording.id, + ) return "" diff --git a/posthog/session_recordings/sql/session_recording_event_sql.py b/posthog/session_recordings/sql/session_recording_event_sql.py index 908b4b4034ddd..fc52f27fbdae8 100644 --- a/posthog/session_recordings/sql/session_recording_event_sql.py +++ b/posthog/session_recordings/sql/session_recording_event_sql.py @@ -2,7 +2,11 @@ from posthog.clickhouse.indexes import index_by_kafka_timestamp from posthog.clickhouse.kafka_engine import KAFKA_COLUMNS, kafka_engine, ttl_period -from posthog.clickhouse.table_engines import Distributed, ReplacingMergeTree, ReplicationScheme +from posthog.clickhouse.table_engines import ( + Distributed, + ReplacingMergeTree, + ReplicationScheme, +) from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_RECORDING_EVENTS SESSION_RECORDING_EVENTS_DATA_TABLE = lambda: "sharded_session_recording_events" @@ -72,7 +76,9 @@ SESSION_RECORDING_EVENTS_DATA_TABLE_ENGINE = lambda: ReplacingMergeTree( - "session_recording_events", ver="_timestamp", replication_scheme=ReplicationScheme.SHARDED + "session_recording_events", + ver="_timestamp", + replication_scheme=ReplicationScheme.SHARDED, ) SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: ( SESSION_RECORDING_EVENTS_TABLE_BASE_SQL @@ -129,7 +135,10 @@ WRITABLE_SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: SESSION_RECORDING_EVENTS_TABLE_BASE_SQL.format( table_name="writable_session_recording_events", 
cluster=settings.CLICKHOUSE_CLUSTER, - engine=Distributed(data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"), + engine=Distributed( + data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), + sharding_key="sipHash64(distinct_id)", + ), extra_fields=KAFKA_COLUMNS, materialized_columns="", ) @@ -138,7 +147,10 @@ DISTRIBUTED_SESSION_RECORDING_EVENTS_TABLE_SQL = lambda: SESSION_RECORDING_EVENTS_TABLE_BASE_SQL.format( table_name="session_recording_events", cluster=settings.CLICKHOUSE_CLUSTER, - engine=Distributed(data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"), + engine=Distributed( + data_table=SESSION_RECORDING_EVENTS_DATA_TABLE(), + sharding_key="sipHash64(distinct_id)", + ), extra_fields=KAFKA_COLUMNS, materialized_columns=SESSION_RECORDING_EVENTS_PROXY_MATERIALIZED_COLUMNS, ) diff --git a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py index ac897fccc1d08..dcf8e5abd809d 100644 --- a/posthog/session_recordings/sql/session_replay_event_migrations_sql.py +++ b/posthog/session_recordings/sql/session_replay_event_migrations_sql.py @@ -1,6 +1,8 @@ from django.conf import settings -from posthog.session_recordings.sql.session_replay_event_sql import SESSION_REPLAY_EVENTS_DATA_TABLE +from posthog.session_recordings.sql.session_replay_event_sql import ( + SESSION_REPLAY_EVENTS_DATA_TABLE, +) DROP_SESSION_REPLAY_EVENTS_TABLE_MV_SQL = ( lambda: "DROP TABLE IF EXISTS session_replay_events_mv ON CLUSTER {cluster}".format( diff --git a/posthog/session_recordings/sql/session_replay_event_sql.py b/posthog/session_recordings/sql/session_replay_event_sql.py index dfe839843979f..e7c2576e93f66 100644 --- a/posthog/session_recordings/sql/session_replay_event_sql.py +++ b/posthog/session_recordings/sql/session_replay_event_sql.py @@ -1,7 +1,11 @@ from django.conf import settings from posthog.clickhouse.kafka_engine import 
kafka_engine -from posthog.clickhouse.table_engines import Distributed, ReplicationScheme, AggregatingMergeTree +from posthog.clickhouse.table_engines import ( + Distributed, + ReplicationScheme, + AggregatingMergeTree, +) from posthog.kafka_client.topics import KAFKA_CLICKHOUSE_SESSION_REPLAY_EVENTS SESSION_REPLAY_EVENTS_DATA_TABLE = lambda: "sharded_session_replay_events" @@ -147,7 +151,10 @@ WRITABLE_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: SESSION_REPLAY_EVENTS_TABLE_BASE_SQL.format( table_name="writable_session_replay_events", cluster=settings.CLICKHOUSE_CLUSTER, - engine=Distributed(data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"), + engine=Distributed( + data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), + sharding_key="sipHash64(distinct_id)", + ), ) @@ -155,7 +162,10 @@ DISTRIBUTED_SESSION_REPLAY_EVENTS_TABLE_SQL = lambda: SESSION_REPLAY_EVENTS_TABLE_BASE_SQL.format( table_name="session_replay_events", cluster=settings.CLICKHOUSE_CLUSTER, - engine=Distributed(data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), sharding_key="sipHash64(distinct_id)"), + engine=Distributed( + data_table=SESSION_REPLAY_EVENTS_DATA_TABLE(), + sharding_key="sipHash64(distinct_id)", + ), ) diff --git a/posthog/session_recordings/test/test_lts_session_recordings.py b/posthog/session_recordings/test/test_lts_session_recordings.py index b16d873b93d7b..e7de94464c18f 100644 --- a/posthog/session_recordings/test/test_lts_session_recordings.py +++ b/posthog/session_recordings/test/test_lts_session_recordings.py @@ -19,7 +19,10 @@ def setUp(self): # Create a new team each time to ensure no clashing between tests self.team = Team.objects.create(organization=self.organization, name="New Team") - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) 
@patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") def test_2023_08_01_version_stored_snapshots_can_be_gathered( self, mock_list_objects: MagicMock, _mock_exists: MagicMock @@ -72,7 +75,10 @@ def list_objects_func(path: str) -> List[str]: ], } - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") def test_original_version_stored_snapshots_can_be_gathered( self, mock_list_objects: MagicMock, _mock_exists: MagicMock @@ -112,7 +118,10 @@ def list_objects_func(path: str) -> List[str]: ], } - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.requests.get") @patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url") @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") @@ -173,7 +182,10 @@ def list_objects_func(path: str) -> List[str]: assert response_data == "the file contents" - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.requests.get") @patch("posthog.session_recordings.session_recording_api.object_storage.tag") @patch("posthog.session_recordings.session_recording_api.object_storage.write") diff --git a/posthog/session_recordings/test/test_session_recording_helpers.py 
b/posthog/session_recordings/test/test_session_recording_helpers.py index 6c64d84efaf78..d59cf816dbf83 100644 --- a/posthog/session_recordings/test/test_session_recording_helpers.py +++ b/posthog/session_recordings/test/test_session_recording_helpers.py @@ -244,14 +244,20 @@ def test_new_ingestion_large_full_snapshot_is_separated(raw_snapshot_events, moc "distinct_id": "abc123", "$session_id": "1234", "$window_id": "1", - "$snapshot_items": [{"type": 3, "timestamp": 1546300800000}, {"type": 3, "timestamp": 1546300800000}], + "$snapshot_items": [ + {"type": 3, "timestamp": 1546300800000}, + {"type": 3, "timestamp": 1546300800000}, + ], }, }, ] def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_events, mocker: MockerFixture): - mocker.patch("posthog.models.utils.UUIDT", return_value="0178495e-8521-0000-8e1c-2652fa57099b") + mocker.patch( + "posthog.models.utils.UUIDT", + return_value="0178495e-8521-0000-8e1c-2652fa57099b", + ) mocker.patch("time.time", return_value=0) almost_too_big_payloads = [ @@ -265,7 +271,11 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event "properties": { "$session_id": "1234", "$window_id": "1", - "$snapshot_data": {"type": 7, "timestamp": 234, "something": almost_too_big_payloads[0]}, + "$snapshot_data": { + "type": 7, + "timestamp": 234, + "something": almost_too_big_payloads[0], + }, "distinct_id": "abc123", }, }, @@ -274,7 +284,11 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event "properties": { "$session_id": "1234", "$window_id": "1", - "$snapshot_data": {"type": 8, "timestamp": 123, "something": almost_too_big_payloads[1]}, + "$snapshot_data": { + "type": 8, + "timestamp": 123, + "something": almost_too_big_payloads[1], + }, "distinct_id": "abc123", }, }, @@ -285,7 +299,13 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event "properties": { "$session_id": "1234", "$window_id": "1", - "$snapshot_items": [{"type": 7, 
"timestamp": 234, "something": almost_too_big_payloads[0]}], + "$snapshot_items": [ + { + "type": 7, + "timestamp": 234, + "something": almost_too_big_payloads[0], + } + ], "distinct_id": "abc123", }, }, @@ -294,7 +314,13 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event "properties": { "$session_id": "1234", "$window_id": "1", - "$snapshot_items": [{"type": 8, "timestamp": 123, "something": almost_too_big_payloads[1]}], + "$snapshot_items": [ + { + "type": 8, + "timestamp": 123, + "something": almost_too_big_payloads[1], + } + ], "distinct_id": "abc123", }, }, @@ -302,7 +328,10 @@ def test_new_ingestion_large_non_full_snapshots_are_separated(raw_snapshot_event def test_new_ingestion_groups_using_snapshot_bytes_if_possible(raw_snapshot_events, mocker: MockerFixture): - mocker.patch("posthog.models.utils.UUIDT", return_value="0178495e-8521-0000-8e1c-2652fa57099b") + mocker.patch( + "posthog.models.utils.UUIDT", + return_value="0178495e-8521-0000-8e1c-2652fa57099b", + ) mocker.patch("time.time", return_value=0) almost_too_big_event = { @@ -350,7 +379,11 @@ def test_new_ingestion_groups_using_snapshot_bytes_if_possible(raw_snapshot_even }, ] - assert [event["properties"]["$snapshot_bytes"] for event in events] == [106, 1072, 159] + assert [event["properties"]["$snapshot_bytes"] for event in events] == [ + 106, + 1072, + 159, + ] space_with_headroom = math.ceil((106 + 1072 + 50) * 1.05) assert list(mock_capture_flow(events, max_size_bytes=space_with_headroom)[1]) == [ diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py index 61c05d993ee4a..3dca9b46b9fb2 100644 --- a/posthog/session_recordings/test/test_session_recordings.py +++ b/posthog/session_recordings/test/test_session_recordings.py @@ -12,13 +12,17 @@ from freezegun import freeze_time from rest_framework import status -from posthog.session_recordings.models.session_recording_event import 
SessionRecordingViewed +from posthog.session_recordings.models.session_recording_event import ( + SessionRecordingViewed, +) from posthog.api.test.test_team import create_team from posthog.constants import SESSION_RECORDINGS_FILTER_IDS from posthog.models import Organization, Person, SessionRecording from posthog.models.filters.session_recordings_filter import SessionRecordingsFilter from posthog.models.team import Team -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.test.base import ( APIBaseTest, ClickhouseTestMixin, @@ -122,10 +126,14 @@ def create_snapshots( def test_get_session_recordings(self): user = Person.objects.create( - team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["user"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) user2 = Person.objects.create( - team=self.team, distinct_ids=["user2"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["user2"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) base_time = (now() - relativedelta(days=1)).replace(microsecond=0) session_id_one = f"test_get_session_recordings-1-{uuid.uuid4()}" @@ -160,7 +168,15 @@ def test_get_session_recordings(self): False, user2.pk, ), - (session_id_one, "user", base_time, base_time + relativedelta(seconds=30), 30, False, user.pk), + ( + session_id_one, + "user", + base_time, + base_time + relativedelta(seconds=30), + 30, + False, + user.pk, + ), ] @patch("posthog.session_recordings.session_recording_api.SessionRecordingListFromReplaySummary") @@ -170,7 +186,7 @@ def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_ self.client.get(f'/api/projects/{self.team.id}/session_recordings?console_logs=["warn", "error"]') assert 
len(mock_summary_lister.call_args_list) == 1 - filter_passed_to_mock: SessionRecordingsFilter = mock_summary_lister.call_args_list[0].kwargs["filter"] + filter_passed_to_mock: (SessionRecordingsFilter) = mock_summary_lister.call_args_list[0].kwargs["filter"] assert filter_passed_to_mock.console_logs_filter == ["warn", "error"] @snapshot_postgres_queries @@ -194,7 +210,9 @@ def test_listing_recordings_is_not_nplus1_for_persons(self): def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user", session_id: str = "1") -> None: Person.objects.create( - team=self.team, distinct_ids=[distinct_id], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=[distinct_id], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) self.create_snapshot(distinct_id, session_id, base_time) self.create_snapshot(distinct_id, session_id, base_time + relativedelta(seconds=10)) @@ -203,10 +221,14 @@ def _person_with_snapshots(self, base_time: datetime, distinct_id: str = "user", def test_session_recordings_dont_leak_teams(self) -> None: another_team = Team.objects.create(organization=self.organization) Person.objects.create( - team=another_team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=another_team, + distinct_ids=["user"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) Person.objects.create( - team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["user"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) base_time = (now() - relativedelta(days=1)).replace(microsecond=0) @@ -236,7 +258,9 @@ def test_session_recording_for_user_with_multiple_distinct_ids(self) -> None: def test_viewed_state_of_session_recording_version_1(self): Person.objects.create( - team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": 
"bob@bob.com"} + team=self.team, + distinct_ids=["u1"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) base_time = (now() - timedelta(days=1)).replace(microsecond=0) SessionRecordingViewed.objects.create(team=self.team, user=self.user, session_id="1") @@ -252,7 +276,9 @@ def test_viewed_state_of_session_recording_version_1(self): def test_viewed_state_of_session_recording_version_3(self): Person.objects.create( - team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["u1"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) base_time = (now() - timedelta(days=1)).replace(microsecond=0) session_id_one = "1" @@ -272,7 +298,9 @@ def test_viewed_state_of_session_recording_version_3(self): def test_setting_viewed_state_of_session_recording(self): Person.objects.create( - team=self.team, distinct_ids=["u1"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["u1"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) base_time = (now() - relativedelta(days=1)).replace(microsecond=0) @@ -326,7 +354,9 @@ def test_setting_viewed_state_of_session_recording(self): def test_get_single_session_recording_metadata(self): with freeze_time("2023-01-01T12:00:00.000Z"): p = Person.objects.create( - team=self.team, distinct_ids=["d1"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["d1"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) session_recording_id = "session_1" base_time = (now() - relativedelta(days=1)).replace(microsecond=0) @@ -370,7 +400,12 @@ def test_get_single_session_recording_metadata(self): def test_single_session_recording_doesnt_leak_teams(self): another_team = Team.objects.create(organization=self.organization) - self.create_snapshot("user", "id_no_team_leaking", now() - relativedelta(days=1), team_id=another_team.pk) + 
self.create_snapshot( + "user", + "id_no_team_leaking", + now() - relativedelta(days=1), + team_id=another_team.pk, + ) response = self.client.get(f"/api/projects/{self.team.id}/session_recordings/id_no_team_leaking") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) @@ -400,7 +435,12 @@ def test_session_recording_doesnt_exist(self): def test_request_to_another_teams_endpoint_returns_401(self): org = Organization.objects.create(name="Separate Org") another_team = Team.objects.create(organization=org) - self.create_snapshot("user", "id_no_team_leaking", now() - relativedelta(days=1), team_id=another_team.pk) + self.create_snapshot( + "user", + "id_no_team_leaking", + now() - relativedelta(days=1), + team_id=another_team.pk, + ) response = self.client.get(f"/api/projects/{another_team.pk}/session_recordings/id_no_team_leaking") self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) @@ -413,11 +453,28 @@ def test_request_to_another_teams_endpoint_returns_401(self): def test_session_ids_filter(self, use_recording_events: bool, api_version: int): with freeze_time("2020-09-13T12:26:40.000Z"): Person.objects.create( - team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["user"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, + ) + self.create_snapshot( + "user", + "1", + now() - relativedelta(days=1), + use_recording_table=use_recording_events, + ) + self.create_snapshot( + "user", + "2", + now() - relativedelta(days=2), + use_recording_table=use_recording_events, + ) + self.create_snapshot( + "user", + "3", + now() - relativedelta(days=3), + use_recording_table=use_recording_events, ) - self.create_snapshot("user", "1", now() - relativedelta(days=1), use_recording_table=use_recording_events) - self.create_snapshot("user", "2", now() - relativedelta(days=2), use_recording_table=use_recording_events) - self.create_snapshot("user", "3", now() - 
relativedelta(days=3), use_recording_table=use_recording_events) # Fetch playlist params_string = urlencode({"session_ids": '["1", "2", "3"]', "version": api_version}) @@ -433,7 +490,9 @@ def test_session_ids_filter(self, use_recording_events: bool, api_version: int): def test_empty_list_session_ids_filter_returns_no_recordings(self): with freeze_time("2020-09-13T12:26:40.000Z"): Person.objects.create( - team=self.team, distinct_ids=["user"], properties={"$some_prop": "something", "email": "bob@bob.com"} + team=self.team, + distinct_ids=["user"], + properties={"$some_prop": "something", "email": "bob@bob.com"}, ) self.create_snapshot("user", "1", now() - relativedelta(days=1)) self.create_snapshot("user", "2", now() - relativedelta(days=2)) @@ -455,7 +514,10 @@ def test_delete_session_recording(self): response = self.client.delete(f"/api/projects/{self.team.id}/session_recordings/1") self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) - @patch("ee.session_recordings.session_recording_extensions.object_storage.copy_objects", return_value=2) + @patch( + "ee.session_recordings.session_recording_extensions.object_storage.copy_objects", + return_value=2, + ) def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None: self.create_snapshot("user", "1", now() - relativedelta(days=1), team_id=self.team.pk) @@ -473,7 +535,10 @@ def test_persist_session_recording(self, _mock_copy_objects: MagicMock) -> None: # New snapshot loading method @freeze_time("2023-01-01T00:00:00Z") - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") def test_get_snapshots_v2_default_response(self, mock_list_objects: MagicMock, _mock_exists: MagicMock) -> None: session_id = str(uuid.uuid4()) @@ 
-510,7 +575,10 @@ def test_get_snapshots_v2_default_response(self, mock_list_objects: MagicMock, _ mock_list_objects.assert_called_with(f"session_recordings/team_id/{self.team.pk}/session_id/{session_id}/data") @freeze_time("2023-01-01T00:00:00Z") - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") def test_get_snapshots_v2_from_lts(self, mock_list_objects: MagicMock, _mock_exists: MagicMock) -> None: session_id = str(uuid.uuid4()) @@ -568,7 +636,10 @@ def list_objects_func(path: str) -> List[str]: ] @freeze_time("2023-01-01T00:00:00Z") - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.object_storage.list_objects") def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_objects, _mock_exists) -> None: session_id = str(uuid.uuid4()) @@ -591,12 +662,19 @@ def test_get_snapshots_v2_default_response_no_realtime_if_old(self, mock_list_ob ] } - @patch("posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", return_value=True) + @patch( + "posthog.session_recordings.queries.session_replay_events.SessionReplayEvents.exists", + return_value=True, + ) @patch("posthog.session_recordings.session_recording_api.SessionRecording.get_or_build") @patch("posthog.session_recordings.session_recording_api.object_storage.get_presigned_url") @patch("posthog.session_recordings.session_recording_api.requests") def test_can_get_session_recording_blob( - self, _mock_requests, mock_presigned_url, mock_get_session_recording, 
_mock_exists + self, + _mock_requests, + mock_presigned_url, + mock_get_session_recording, + _mock_exists, ) -> None: session_id = str(uuid.uuid4()) """API will add session_recordings/team_id/{self.team.pk}/session_id/{session_id}""" @@ -662,7 +740,8 @@ def test_get_via_sharing_token(self, mock_copy_objects: MagicMock) -> None: ) token = self.client.patch( - f"/api/projects/{self.team.id}/session_recordings/{session_id}/sharing", {"enabled": True} + f"/api/projects/{self.team.id}/session_recordings/{session_id}/sharing", + {"enabled": True}, ).json()["access_token"] self.client.logout() @@ -764,7 +843,10 @@ def test_get_matching_events(self) -> None: session_id = f"test_get_matching_events-1-{uuid.uuid4()}" self.create_snapshot("user", session_id, base_time) event_id = _create_event( - event="$pageview", properties={"$session_id": session_id}, team=self.team, distinct_id=uuid.uuid4() + event="$pageview", + properties={"$session_id": session_id}, + team=self.team, + distinct_id=uuid.uuid4(), ) # a non-matching session diff --git a/posthog/settings/__init__.py b/posthog/settings/__init__.py index 32b3d87d322ae..099e1812e5311 100644 --- a/posthog/settings/__init__.py +++ b/posthog/settings/__init__.py @@ -46,7 +46,10 @@ # https://posthog.com/docs/self-host/configure/environment-variables debug_queries = get_from_env("DEBUG_QUERIES", False, type_cast=str_to_bool) disable_paid_fs = get_from_env("DISABLE_PAID_FEATURE_SHOWCASING", False, type_cast=str_to_bool) -INSTANCE_PREFERENCES = {"debug_queries": debug_queries, "disable_paid_fs": disable_paid_fs} +INSTANCE_PREFERENCES = { + "debug_queries": debug_queries, + "disable_paid_fs": disable_paid_fs, +} SITE_URL: str = os.getenv("SITE_URL", "http://localhost:8000").rstrip("/") INSTANCE_TAG: str = os.getenv("INSTANCE_TAG", "none") @@ -60,7 +63,10 @@ "DISABLE_MMDB", TEST, type_cast=str_to_bool ) # plugin server setting disabling GeoIP feature PLUGINS_PREINSTALLED_URLS: List[str] = ( - os.getenv("PLUGINS_PREINSTALLED_URLS", 
"https://www.npmjs.com/package/@posthog/geoip-plugin").split(",") + os.getenv( + "PLUGINS_PREINSTALLED_URLS", + "https://www.npmjs.com/package/@posthog/geoip-plugin", + ).split(",") if not DISABLE_MMDB else [] ) @@ -79,6 +85,9 @@ # Whether kea should be act in verbose mode KEA_VERBOSE_LOGGING = get_from_env("KEA_VERBOSE_LOGGING", False, type_cast=str_to_bool) +# MapLibre Style URL to configure map tile source +MAPLIBRE_STYLE_URL = get_from_env("MAPLIBRE_STYLE_URL", optional=True) + # Only written in specific scripts - do not use outside of them. PERSON_ON_EVENTS_OVERRIDE = get_from_env("PERSON_ON_EVENTS_OVERRIDE", optional=True, type_cast=str_to_bool) diff --git a/posthog/settings/data_stores.py b/posthog/settings/data_stores.py index 49fa73f3bd030..9f6f9ca74cab8 100644 --- a/posthog/settings/data_stores.py +++ b/posthog/settings/data_stores.py @@ -62,7 +62,10 @@ def postgres_config(host: str) -> dict: PG_PASSWORD = os.getenv("PGPASSWORD", "posthog") PG_PORT = os.getenv("PGPORT", "5432") PG_DATABASE = os.getenv("PGDATABASE", "posthog") - DATABASE_URL = os.getenv("DATABASE_URL", f"postgres://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}") + DATABASE_URL = os.getenv( + "DATABASE_URL", + f"postgres://{PG_USER}:{PG_PASSWORD}@{PG_HOST}:{PG_PORT}/{PG_DATABASE}", + ) else: DATABASE_URL = os.getenv("DATABASE_URL", "") diff --git a/posthog/settings/dynamic_settings.py b/posthog/settings/dynamic_settings.py index 94d774b0200f0..b7eb65967fc65 100644 --- a/posthog/settings/dynamic_settings.py +++ b/posthog/settings/dynamic_settings.py @@ -223,4 +223,9 @@ # SECRET_SETTINGS can only be updated but will never be exposed through the API (we do store them plain text in the DB) # On the frontend UI will clearly show which configuration elements are secret and whether they have a set value or not. 
-SECRET_SETTINGS = ["EMAIL_HOST_PASSWORD", "SLACK_APP_CLIENT_SECRET", "SLACK_APP_SIGNING_SECRET", "SENTRY_AUTH_TOKEN"] +SECRET_SETTINGS = [ + "EMAIL_HOST_PASSWORD", + "SLACK_APP_CLIENT_SECRET", + "SLACK_APP_SIGNING_SECRET", + "SENTRY_AUTH_TOKEN", +] diff --git a/posthog/settings/ingestion.py b/posthog/settings/ingestion.py index bd9edbc6fb03c..b60206d101ae9 100644 --- a/posthog/settings/ingestion.py +++ b/posthog/settings/ingestion.py @@ -1,7 +1,8 @@ import os + import structlog -from posthog.settings.utils import get_from_env, get_list +from posthog.settings.utils import get_from_env, get_list, get_set from posthog.utils import str_to_bool logger = structlog.get_logger(__name__) @@ -32,3 +33,6 @@ REPLAY_RETENTION_DAYS_MIN = get_from_env("REPLAY_RETENTION_DAYS_MIN", type_cast=int, default=30) REPLAY_RETENTION_DAYS_MAX = get_from_env("REPLAY_RETENTION_DAYS_MAX", type_cast=int, default=90) + +NEW_ANALYTICS_CAPTURE_ENDPOINT = os.getenv("NEW_CAPTURE_ENDPOINT", "/i/v0/e/") +NEW_ANALYTICS_CAPTURE_TEAM_IDS = get_set(os.getenv("NEW_ANALYTICS_CAPTURE_TEAM_IDS", "")) diff --git a/posthog/settings/sentry.py b/posthog/settings/sentry.py index 208f3bfd81e2c..f2c36695b62cb 100644 --- a/posthog/settings/sentry.py +++ b/posthog/settings/sentry.py @@ -136,7 +136,12 @@ def sentry_init() -> None: send_default_pii=send_pii, dsn=os.environ["SENTRY_DSN"], release=release, - integrations=[DjangoIntegration(), CeleryIntegration(), RedisIntegration(), sentry_logging], + integrations=[ + DjangoIntegration(), + CeleryIntegration(), + RedisIntegration(), + sentry_logging, + ], request_bodies="always" if send_pii else "never", sample_rate=1.0, # Configures the sample rate for error events, in the range of 0.0 to 1.0 (default). 
diff --git a/posthog/settings/service_requirements.py b/posthog/settings/service_requirements.py index 2592d73a1be60..79cdc55d51c67 100644 --- a/posthog/settings/service_requirements.py +++ b/posthog/settings/service_requirements.py @@ -8,7 +8,9 @@ SKIP_SERVICE_VERSION_REQUIREMENTS = get_from_env( - "SKIP_SERVICE_VERSION_REQUIREMENTS", TEST or IS_COLLECT_STATIC or DEBUG, type_cast=str_to_bool + "SKIP_SERVICE_VERSION_REQUIREMENTS", + TEST or IS_COLLECT_STATIC or DEBUG, + type_cast=str_to_bool, ) if SKIP_SERVICE_VERSION_REQUIREMENTS and not (TEST or DEBUG): diff --git a/posthog/settings/utils.py b/posthog/settings/utils.py index 9181b04b1fbbd..6dd22dbf97cf8 100644 --- a/posthog/settings/utils.py +++ b/posthog/settings/utils.py @@ -1,5 +1,5 @@ import os -from typing import Any, Callable, List, Optional +from typing import Any, Callable, List, Optional, Set from django.core.exceptions import ImproperlyConfigured @@ -8,7 +8,13 @@ __all__ = ["get_from_env", "get_list", "str_to_bool"] -def get_from_env(key: str, default: Any = None, *, optional: bool = False, type_cast: Optional[Callable] = None) -> Any: +def get_from_env( + key: str, + default: Any = None, + *, + optional: bool = False, + type_cast: Optional[Callable] = None, +) -> Any: value = os.getenv(key) if value is None or value == "": if optional: @@ -26,3 +32,9 @@ def get_list(text: str) -> List[str]: if not text: return [] return [item.strip() for item in text.split(",")] + + +def get_set(text: str) -> Set[str]: + if not text: + return set() + return {item.strip() for item in text.split(",")} diff --git a/posthog/settings/web.py b/posthog/settings/web.py index b062ce632a71a..b846a2486c5df 100644 --- a/posthog/settings/web.py +++ b/posthog/settings/web.py @@ -112,6 +112,10 @@ MIDDLEWARE.insert(0, "django_statsd.middleware.StatsdMiddleware") MIDDLEWARE.append("django_statsd.middleware.StatsdMiddlewareTimer") +if DEBUG: + # Used on local devenv to reverse-proxy all of /i/* to capture-rs on port 3000 + 
INSTALLED_APPS.append("revproxy") + # Append Enterprise Edition as an app if available try: from ee.apps import EnterpriseConfig # noqa: F401 @@ -179,7 +183,12 @@ SOCIAL_AUTH_STRATEGY = "social_django.strategy.DjangoStrategy" SOCIAL_AUTH_STORAGE = "social_django.models.DjangoStorage" -SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = ["invite_id", "user_name", "email_opt_in", "organization_name"] +SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = [ + "invite_id", + "user_name", + "email_opt_in", + "organization_name", +] SOCIAL_AUTH_GITHUB_SCOPE = ["user:email"] SOCIAL_AUTH_GITHUB_KEY = os.getenv("SOCIAL_AUTH_GITHUB_KEY") SOCIAL_AUTH_GITHUB_SECRET = os.getenv("SOCIAL_AUTH_GITHUB_SECRET") @@ -218,7 +227,10 @@ STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles") STATIC_URL = "/static/" -STATICFILES_DIRS = [os.path.join(BASE_DIR, "frontend/dist"), os.path.join(BASE_DIR, "posthog/year_in_posthog/images")] +STATICFILES_DIRS = [ + os.path.join(BASE_DIR, "frontend/dist"), + os.path.join(BASE_DIR, "posthog/year_in_posthog/images"), +] STATICFILES_STORAGE = "whitenoise.storage.ManifestStaticFilesStorage" AUTH_USER_MODEL = "posthog.User" diff --git a/posthog/storage/object_storage.py b/posthog/storage/object_storage.py index 79ea0c90ceb19..a1ff639b1c293 100644 --- a/posthog/storage/object_storage.py +++ b/posthog/storage/object_storage.py @@ -111,7 +111,12 @@ def list_objects(self, bucket: str, prefix: str) -> Optional[List[str]]: else: return None except Exception as e: - logger.error("object_storage.list_objects_failed", bucket=bucket, prefix=prefix, error=e) + logger.error( + "object_storage.list_objects_failed", + bucket=bucket, + prefix=prefix, + error=e, + ) capture_exception(e) return None @@ -128,7 +133,13 @@ def read_bytes(self, bucket: str, key: str) -> Optional[bytes]: s3_response = self.aws_client.get_object(Bucket=bucket, Key=key) return s3_response["Body"].read() except Exception as e: - logger.error("object_storage.read_failed", bucket=bucket, file_name=key, error=e, 
s3_response=s3_response) + logger.error( + "object_storage.read_failed", + bucket=bucket, + file_name=key, + error=e, + s3_response=s3_response, + ) capture_exception(e) raise ObjectStorageError("read failed") from e @@ -149,7 +160,13 @@ def write(self, bucket: str, key: str, content: Union[str, bytes], extras: Dict try: s3_response = self.aws_client.put_object(Bucket=bucket, Body=content, Key=key, **(extras or {})) except Exception as e: - logger.error("object_storage.write_failed", bucket=bucket, file_name=key, error=e, s3_response=s3_response) + logger.error( + "object_storage.write_failed", + bucket=bucket, + file_name=key, + error=e, + s3_response=s3_response, + ) capture_exception(e) raise ObjectStorageError("write failed") from e @@ -165,7 +182,10 @@ def copy_objects(self, bucket: str, source_prefix: str, target_prefix: str) -> i return len(source_objects) except Exception as e: logger.error( - "object_storage.copy_objects_failed", source_prefix=source_prefix, target_prefix=target_prefix, error=e + "object_storage.copy_objects_failed", + source_prefix=source_prefix, + target_prefix=target_prefix, + error=e, ) capture_exception(e) return None @@ -186,7 +206,11 @@ def object_storage_client() -> ObjectStorageClient: endpoint_url=settings.OBJECT_STORAGE_ENDPOINT, aws_access_key_id=settings.OBJECT_STORAGE_ACCESS_KEY_ID, aws_secret_access_key=settings.OBJECT_STORAGE_SECRET_ACCESS_KEY, - config=Config(signature_version="s3v4", connect_timeout=1, retries={"max_attempts": 1}), + config=Config( + signature_version="s3v4", + connect_timeout=1, + retries={"max_attempts": 1}, + ), region_name=settings.OBJECT_STORAGE_REGION, ) ) @@ -196,7 +220,10 @@ def object_storage_client() -> ObjectStorageClient: def write(file_name: str, content: Union[str, bytes], extras: Dict | None = None) -> None: return object_storage_client().write( - bucket=settings.OBJECT_STORAGE_BUCKET, key=file_name, content=content, extras=extras + bucket=settings.OBJECT_STORAGE_BUCKET, + key=file_name, + 
content=content, + extras=extras, ) @@ -219,7 +246,9 @@ def list_objects(prefix: str) -> Optional[List[str]]: def copy_objects(source_prefix: str, target_prefix: str) -> int: return ( object_storage_client().copy_objects( - bucket=settings.OBJECT_STORAGE_BUCKET, source_prefix=source_prefix, target_prefix=target_prefix + bucket=settings.OBJECT_STORAGE_BUCKET, + source_prefix=source_prefix, + target_prefix=target_prefix, ) or 0 ) diff --git a/posthog/storage/test/test_object_storage.py b/posthog/storage/test/test_object_storage.py index 3544df570d4c2..f24114911ba9e 100644 --- a/posthog/storage/test/test_object_storage.py +++ b/posthog/storage/test/test_object_storage.py @@ -10,7 +10,14 @@ OBJECT_STORAGE_ENDPOINT, OBJECT_STORAGE_SECRET_ACCESS_KEY, ) -from posthog.storage.object_storage import health_check, read, write, get_presigned_url, list_objects, copy_objects +from posthog.storage.object_storage import ( + health_check, + read, + write, + get_presigned_url, + list_objects, + copy_objects, +) from posthog.test.base import APIBaseTest TEST_BUCKET = "test_storage_bucket" @@ -113,7 +120,8 @@ def test_can_copy_objects_between_prefixes(self) -> None: write(file_name, "my content".encode("utf-8")) copied_count = copy_objects( - source_prefix=f"{TEST_BUCKET}/{shared_prefix}", target_prefix=f"{TEST_BUCKET}/the_destination/folder" + source_prefix=f"{TEST_BUCKET}/{shared_prefix}", + target_prefix=f"{TEST_BUCKET}/the_destination/folder", ) assert copied_count == 3 @@ -137,7 +145,8 @@ def test_can_safely_copy_objects_from_unknown_prefix(self) -> None: write(file_name, "my content".encode("utf-8")) copied_count = copy_objects( - source_prefix=f"nothing_here", target_prefix=f"{TEST_BUCKET}/the_destination/folder" + source_prefix=f"nothing_here", + target_prefix=f"{TEST_BUCKET}/the_destination/folder", ) assert copied_count == 0 diff --git a/posthog/tasks/async_migrations.py b/posthog/tasks/async_migrations.py index 608fead3e07a3..ae505b44131e5 100644 --- 
a/posthog/tasks/async_migrations.py +++ b/posthog/tasks/async_migrations.py @@ -7,7 +7,11 @@ start_async_migration, update_migration_progress, ) -from posthog.async_migrations.utils import force_stop_migration, process_error, trigger_migration +from posthog.async_migrations.utils import ( + force_stop_migration, + process_error, + trigger_migration, +) from posthog.celery import app from posthog.models.instance_setting import get_instance_setting @@ -44,7 +48,11 @@ def check_async_migration_health() -> None: # failures and successes are handled elsewhere # pending means we haven't picked up the task yet # retry is not possible as max_retries == 0 - if migration_task_celery_state not in (states.STARTED, states.PENDING, states.FAILURE): + if migration_task_celery_state not in ( + states.STARTED, + states.PENDING, + states.FAILURE, + ): return inspector = app.control.inspect() diff --git a/posthog/tasks/calculate_cohort.py b/posthog/tasks/calculate_cohort.py index f3c09f65119e4..1c4492071c78a 100644 --- a/posthog/tasks/calculate_cohort.py +++ b/posthog/tasks/calculate_cohort.py @@ -62,7 +62,10 @@ def calculate_cohort_from_list(cohort_id: int, items: List[str]) -> None: @shared_task(ignore_result=True, max_retries=1) def insert_cohort_from_insight_filter(cohort_id: int, filter_data: Dict[str, Any]) -> None: - from posthog.api.cohort import insert_cohort_actors_into_ch, insert_cohort_people_into_pg + from posthog.api.cohort import ( + insert_cohort_actors_into_ch, + insert_cohort_people_into_pg, + ) cohort = Cohort.objects.get(pk=cohort_id) diff --git a/posthog/tasks/check_clickhouse_schema_drift.py b/posthog/tasks/check_clickhouse_schema_drift.py index d4ed1347f4419..bea00530b7eba 100644 --- a/posthog/tasks/check_clickhouse_schema_drift.py +++ b/posthog/tasks/check_clickhouse_schema_drift.py @@ -91,7 +91,8 @@ def get_clickhouse_schema_drift( def check_clickhouse_schema_drift( - clickhouse_nodes: List[Tuple[str]] = [], clickhouse_schema: List[Tuple[str, str, str]] = [] 
+ clickhouse_nodes: List[Tuple[str]] = [], + clickhouse_schema: List[Tuple[str, str, str]] = [], ) -> None: try: if not clickhouse_nodes: diff --git a/posthog/tasks/email.py b/posthog/tasks/email.py index 44bd8eae03087..bd7f60188b166 100644 --- a/posthog/tasks/email.py +++ b/posthog/tasks/email.py @@ -9,7 +9,15 @@ from posthog.celery import app from posthog.cloud_utils import is_cloud from posthog.email import EmailMessage, is_email_available -from posthog.models import Organization, OrganizationInvite, OrganizationMembership, Plugin, PluginConfig, Team, User +from posthog.models import ( + Organization, + OrganizationInvite, + OrganizationMembership, + Plugin, + PluginConfig, + Team, + User, +) from posthog.user_permissions import UserPermissions logger = structlog.get_logger(__name__) @@ -119,7 +127,10 @@ def send_email_verification(user_id: int, token: str) -> None: retry_backoff=True, ) def send_fatal_plugin_error( - plugin_config_id: int, plugin_config_updated_at: Optional[str], error: str, is_system_error: bool + plugin_config_id: int, + plugin_config_updated_at: Optional[str], + error: str, + is_system_error: bool, ) -> None: if not is_email_available(with_absolute_urls=True): return @@ -131,7 +142,12 @@ def send_fatal_plugin_error( campaign_key=campaign_key, subject=f"[Alert] {plugin} has been disabled in project {team} due to a fatal error", template_name="fatal_plugin_error", - template_context={"plugin": plugin, "team": team, "error": error, "is_system_error": is_system_error}, + template_context={ + "plugin": plugin, + "team": team, + "error": error, + "is_system_error": is_system_error, + }, ) memberships_to_email = [] memberships = OrganizationMembership.objects.prefetch_related("user", "organization").filter( @@ -181,13 +197,21 @@ def send_email_change_emails(now_iso: str, user_name: str, old_address: str, new campaign_key=f"email_change_old_address_{now_iso}", subject="This is no longer your PostHog account email", 
template_name="email_change_old_address", - template_context={"user_name": user_name, "old_address": old_address, "new_address": new_address}, + template_context={ + "user_name": user_name, + "old_address": old_address, + "new_address": new_address, + }, ) message_new_address = EmailMessage( campaign_key=f"email_change_new_address_{now_iso}", subject="This is your new PostHog account email", template_name="email_change_new_address", - template_context={"user_name": user_name, "old_address": old_address, "new_address": new_address}, + template_context={ + "user_name": user_name, + "old_address": old_address, + "new_address": new_address, + }, ) message_old_address.add_recipient(email=old_address) message_new_address.add_recipient(email=new_address) diff --git a/posthog/tasks/exporter.py b/posthog/tasks/exporter.py index 01c85537602f0..ed41d9d5412d0 100644 --- a/posthog/tasks/exporter.py +++ b/posthog/tasks/exporter.py @@ -34,14 +34,20 @@ # export_asset is used in chords/groups and so must not ignore its results -@app.task(autoretry_for=(Exception,), max_retries=5, retry_backoff=True, acks_late=True, ignore_result=False) +@app.task( + autoretry_for=(Exception,), + max_retries=5, + retry_backoff=True, + acks_late=True, + ignore_result=False, +) def export_asset(exported_asset_id: int, limit: Optional[int] = None) -> None: from posthog.tasks.exports import csv_exporter, image_exporter # if Celery is lagging then you can end up with an exported asset that has had a TTL added # and that TTL has passed, in the exporter we don't care about that. # the TTL is for later cleanup. 
- exported_asset: ExportedAsset = ExportedAsset.objects_including_ttl_deleted.select_related( + exported_asset: (ExportedAsset) = ExportedAsset.objects_including_ttl_deleted.select_related( "insight", "dashboard" ).get(pk=exported_asset_id) diff --git a/posthog/tasks/exports/csv_exporter.py b/posthog/tasks/exports/csv_exporter.py index 9643244119668..622798774ec1d 100644 --- a/posthog/tasks/exports/csv_exporter.py +++ b/posthog/tasks/exports/csv_exporter.py @@ -12,7 +12,12 @@ from posthog.models.exported_asset import ExportedAsset, save_content from posthog.utils import absolute_uri from .ordered_csv_renderer import OrderedCsvRenderer -from ..exporter import EXPORT_FAILED_COUNTER, EXPORT_ASSET_UNKNOWN_COUNTER, EXPORT_SUCCEEDED_COUNTER, EXPORT_TIMER +from ..exporter import ( + EXPORT_FAILED_COUNTER, + EXPORT_ASSET_UNKNOWN_COUNTER, + EXPORT_SUCCEEDED_COUNTER, + EXPORT_TIMER, +) from ...constants import CSV_EXPORT_LIMIT logger = structlog.get_logger(__name__) @@ -128,12 +133,18 @@ def _convert_response_to_csv_data(data: Any) -> List[Any]: for item in items: if item.get("date"): # Dated means we create a grid - line = {"cohort": item["date"], "cohort size": item["values"][0]["count"]} + line = { + "cohort": item["date"], + "cohort size": item["values"][0]["count"], + } for index, data in enumerate(item["values"]): line[items[index]["label"]] = data["count"] else: # Otherwise we just specify "Period" for titles - line = {"cohort": item["label"], "cohort size": item["values"][0]["count"]} + line = { + "cohort": item["label"], + "cohort size": item["values"][0]["count"], + } for index, data in enumerate(item["values"]): line[f"Period {index}"] = data["count"] @@ -182,7 +193,9 @@ def _export_to_csv(exported_asset: ExportedAsset, limit: int = 1000) -> None: body = resource.get("body", None) next_url = None access_token = encode_jwt( - {"id": exported_asset.created_by_id}, datetime.timedelta(minutes=15), PosthogJwtAudience.IMPERSONATED_USER + {"id": 
exported_asset.created_by_id}, + datetime.timedelta(minutes=15), + PosthogJwtAudience.IMPERSONATED_USER, ) while len(all_csv_rows) < CSV_EXPORT_LIMIT: @@ -243,13 +256,24 @@ def get_limit_param_key(path: str) -> str: def make_api_call( - access_token: str, body: Any, limit: int, method: str, next_url: Optional[str], path: str + access_token: str, + body: Any, + limit: int, + method: str, + next_url: Optional[str], + path: str, ) -> requests.models.Response: request_url: str = absolute_uri(next_url or path) try: - url = add_query_params(request_url, {get_limit_param_key(request_url): str(limit), "is_csv_export": "1"}) + url = add_query_params( + request_url, + {get_limit_param_key(request_url): str(limit), "is_csv_export": "1"}, + ) response = requests.request( - method=method.lower(), url=url, json=body, headers={"Authorization": f"Bearer {access_token}"} + method=method.lower(), + url=url, + json=body, + headers={"Authorization": f"Bearer {access_token}"}, ) return response except Exception as ex: diff --git a/posthog/tasks/exports/exporter_utils.py b/posthog/tasks/exports/exporter_utils.py index 38b8979f8f467..a47f43aa41710 100644 --- a/posthog/tasks/exports/exporter_utils.py +++ b/posthog/tasks/exports/exporter_utils.py @@ -50,4 +50,8 @@ def log_error_if_site_url_not_reachable() -> None: if not settings.SITE_URL: logger.error("site_url_not_set") elif not is_site_url_reachable(): - logger.error("site_url_not_reachable", site_url=settings.SITE_URL, exception=_site_reachable_exception) + logger.error( + "site_url_not_reachable", + site_url=settings.SITE_URL, + exception=_site_reachable_exception, + ) diff --git a/posthog/tasks/exports/image_exporter.py b/posthog/tasks/exports/image_exporter.py index 057239a929f50..1961d9a456053 100644 --- a/posthog/tasks/exports/image_exporter.py +++ b/posthog/tasks/exports/image_exporter.py @@ -16,8 +16,16 @@ from webdriver_manager.core.os_manager import ChromeType from posthog.caching.fetch_from_cache import 
synchronously_update_cache -from posthog.models.exported_asset import ExportedAsset, get_public_access_token, save_content -from posthog.tasks.exporter import EXPORT_SUCCEEDED_COUNTER, EXPORT_FAILED_COUNTER, EXPORT_TIMER +from posthog.models.exported_asset import ( + ExportedAsset, + get_public_access_token, + save_content, +) +from posthog.tasks.exporter import ( + EXPORT_SUCCEEDED_COUNTER, + EXPORT_FAILED_COUNTER, + EXPORT_TIMER, +) from posthog.tasks.exports.exporter_utils import log_error_if_site_url_not_reachable from posthog.utils import absolute_uri @@ -111,7 +119,10 @@ def _export_to_png(exported_asset: ExportedAsset) -> None: def _screenshot_asset( - image_path: str, url_to_render: str, screenshot_width: ScreenWidth, wait_for_css_selector: CSSSelector + image_path: str, + url_to_render: str, + screenshot_width: ScreenWidth, + wait_for_css_selector: CSSSelector, ) -> None: driver: Optional[webdriver.Chrome] = None try: diff --git a/posthog/tasks/exports/ordered_csv_renderer.py b/posthog/tasks/exports/ordered_csv_renderer.py index c969772e9d815..1b7a16dd83c3e 100644 --- a/posthog/tasks/exports/ordered_csv_renderer.py +++ b/posthog/tasks/exports/ordered_csv_renderer.py @@ -16,7 +16,6 @@ def tablize(self, data: Any, header: Any = None, labels: Any = None) -> Generato header = data.header if data: - # First, flatten the data (i.e., convert it to a list of # dictionaries that are each exactly one level deep). 
The key for # each item designates the name of the column that the item will diff --git a/posthog/tasks/exports/test/test_csv_exporter.py b/posthog/tasks/exports/test/test_csv_exporter.py index 62ca713517f0e..65fda3baa0dd4 100644 --- a/posthog/tasks/exports/test/test_csv_exporter.py +++ b/posthog/tasks/exports/test/test_csv_exporter.py @@ -19,7 +19,10 @@ from posthog.storage import object_storage from posthog.storage.object_storage import ObjectStorageError from posthog.tasks.exports import csv_exporter -from posthog.tasks.exports.csv_exporter import UnexpectedEmptyJsonResponse, add_query_params +from posthog.tasks.exports.csv_exporter import ( + UnexpectedEmptyJsonResponse, + add_query_params, +) from posthog.test.base import APIBaseTest, _create_event, flush_persons_and_events from posthog.utils import absolute_uri @@ -257,7 +260,10 @@ def test_limiting_query_as_expected(self) -> None: with self.settings(SITE_URL="https://app.posthog.com"): modified_url = add_query_params(absolute_uri(regression_11204), {"limit": "3500"}) actual_bits = self._split_to_dict(modified_url) - expected_bits = {**self._split_to_dict(regression_11204), **{"limit": "3500"}} + expected_bits = { + **self._split_to_dict(regression_11204), + **{"limit": "3500"}, + } assert expected_bits == actual_bits def test_limiting_existing_limit_query_as_expected(self) -> None: @@ -265,7 +271,10 @@ def test_limiting_existing_limit_query_as_expected(self) -> None: url_with_existing_limit = regression_11204 + "&limit=100000" modified_url = add_query_params(absolute_uri(url_with_existing_limit), {"limit": "3500"}) actual_bits = self._split_to_dict(modified_url) - expected_bits = {**self._split_to_dict(regression_11204), **{"limit": "3500"}} + expected_bits = { + **self._split_to_dict(regression_11204), + **{"limit": "3500"}, + } assert expected_bits == actual_bits @patch("posthog.tasks.exports.csv_exporter.make_api_call") @@ -341,7 +350,11 @@ def test_csv_exporter_events_query(self, mocked_uuidt, 
MAX_SELECT_RETURNED_ROWS= team=self.team, export_format=ExportedAsset.ExportFormat.CSV, export_context={ - "source": {"kind": "EventsQuery", "select": ["event", "*"], "where": [f"distinct_id = '{random_uuid}'"]} + "source": { + "kind": "EventsQuery", + "select": ["event", "*"], + "where": [f"distinct_id = '{random_uuid}'"], + } }, ) exported_asset.save() diff --git a/posthog/tasks/exports/test/test_csv_exporter_renders.py b/posthog/tasks/exports/test/test_csv_exporter_renders.py index 26cb67c08885a..f17e64635370b 100644 --- a/posthog/tasks/exports/test/test_csv_exporter_renders.py +++ b/posthog/tasks/exports/test/test_csv_exporter_renders.py @@ -33,7 +33,9 @@ def test_csv_rendering(mock_settings, mock_request, filename): fixture = json.load(f) asset = ExportedAsset( - team=team, export_format=ExportedAsset.ExportFormat.CSV, export_context={"path": "/api/literally/anything"} + team=team, + export_format=ExportedAsset.ExportFormat.CSV, + export_context={"path": "/api/literally/anything"}, ) asset.save() diff --git a/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py b/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py index 70161c42b6244..b78a870e626c4 100644 --- a/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py +++ b/posthog/tasks/exports/test/test_csv_exporter_url_sanitising.py @@ -16,10 +16,26 @@ def test_sanitize_url_when_provided_path_and_site_url_has_a_port(self) -> None: assert sanitised == "https://localhost:8000/some/location" error_test_cases = [ - ("changing scheme", "https://localhost:8000", "http://localhost:8000/some/location"), - ("changing port", "https://localhost:8000", "https://localhost:8123/some/location"), - ("changing port and url", "https://something.posthog.com:8000", "https://localhost:8123/some/location"), - ("changing domain", "https://app.posthog.com", "https://google.com/some/location"), + ( + "changing scheme", + "https://localhost:8000", + "http://localhost:8000/some/location", + ), + ( + 
"changing port", + "https://localhost:8000", + "https://localhost:8123/some/location", + ), + ( + "changing port and url", + "https://something.posthog.com:8000", + "https://localhost:8123/some/location", + ), + ( + "changing domain", + "https://app.posthog.com", + "https://google.com/some/location", + ), ] @parameterized.expand(error_test_cases) diff --git a/posthog/tasks/exports/test/test_image_exporter.py b/posthog/tasks/exports/test/test_image_exporter.py index 948500a9d77b8..3c3a84133a1b1 100644 --- a/posthog/tasks/exports/test/test_image_exporter.py +++ b/posthog/tasks/exports/test/test_image_exporter.py @@ -20,7 +20,11 @@ @patch("posthog.tasks.exports.image_exporter.synchronously_update_cache") @patch("posthog.tasks.exports.image_exporter._screenshot_asset") -@patch("posthog.tasks.exports.image_exporter.open", new_callable=mock_open, read_data=b"image_data") +@patch( + "posthog.tasks.exports.image_exporter.open", + new_callable=mock_open, + read_data=b"image_data", +) @patch("os.remove") class TestImageExporter(APIBaseTest): exported_asset: ExportedAsset @@ -28,7 +32,9 @@ class TestImageExporter(APIBaseTest): def setup_method(self, method): insight = Insight.objects.create(team=self.team) asset = ExportedAsset.objects.create( - team=self.team, export_format=ExportedAsset.ExportFormat.PNG, insight=insight + team=self.team, + export_format=ExportedAsset.ExportFormat.PNG, + insight=insight, ) self.exported_asset = asset diff --git a/posthog/tasks/test/test_async_migrations.py b/posthog/tasks/test/test_async_migrations.py index bb7cfce0797e7..27bb8fc991b8a 100644 --- a/posthog/tasks/test/test_async_migrations.py +++ b/posthog/tasks/test/test_async_migrations.py @@ -6,7 +6,10 @@ from celery.result import AsyncResult from posthog.async_migrations.examples.test_migration import Migration -from posthog.async_migrations.runner import run_async_migration_next_op, run_async_migration_operations +from posthog.async_migrations.runner import ( + 
run_async_migration_next_op, + run_async_migration_operations, +) from posthog.async_migrations.test.util import create_async_migration from posthog.models.async_migration import AsyncMigration, MigrationStatus from posthog.models.instance_setting import set_instance_setting @@ -45,7 +48,10 @@ def setUp(self) -> None: @pytest.mark.ee @patch.object(AsyncResult, "state", states.STARTED) @patch("posthog.celery.app.control.inspect", side_effect=inspect_mock) - @patch("posthog.tasks.async_migrations.run_async_migration.delay", side_effect=run_async_migration_mock) + @patch( + "posthog.tasks.async_migrations.run_async_migration.delay", + side_effect=run_async_migration_mock, + ) def test_check_async_migration_health_during_resumable_op(self, _: Any, __: Any) -> None: """ Mocks celery tasks and tests that `check_async_migration_health` works as expected @@ -76,7 +82,10 @@ def test_check_async_migration_health_during_resumable_op(self, _: Any, __: Any) @pytest.mark.ee @patch.object(AsyncResult, "state", states.STARTED) @patch("posthog.celery.app.control.inspect", side_effect=inspect_mock) - @patch("posthog.tasks.async_migrations.run_async_migration.delay", side_effect=run_async_migration_mock) + @patch( + "posthog.tasks.async_migrations.run_async_migration.delay", + side_effect=run_async_migration_mock, + ) def test_check_async_migration_health_during_non_resumable_op(self, _: Any, __: Any) -> None: """ Same as above, but now we find a non-resumbale op. 
diff --git a/posthog/tasks/test/test_calculate_cohort.py b/posthog/tasks/test/test_calculate_cohort.py index 749387c2a6344..0c81076c8fa81 100644 --- a/posthog/tasks/test/test_calculate_cohort.py +++ b/posthog/tasks/test/test_calculate_cohort.py @@ -71,7 +71,10 @@ def test_calculate_cohorts(self) -> None: team=self.team, filters={ "groups": [ - {"properties": [{"key": "id", "type": "cohort", "value": 267}], "rollout_percentage": None} + { + "properties": [{"key": "id", "type": "cohort", "value": 267}], + "rollout_percentage": None, + } ] }, key="default-flag-1", diff --git a/posthog/tasks/test/test_check_clickhouse_schema_drift.py b/posthog/tasks/test/test_check_clickhouse_schema_drift.py index 831e4ffbc1c3d..8d38d134cac40 100644 --- a/posthog/tasks/test/test_check_clickhouse_schema_drift.py +++ b/posthog/tasks/test/test_check_clickhouse_schema_drift.py @@ -2,7 +2,10 @@ from clickhouse_driver.errors import Error as ClickhouseError -from posthog.tasks.check_clickhouse_schema_drift import check_clickhouse_schema_drift, get_clickhouse_schema_drift +from posthog.tasks.check_clickhouse_schema_drift import ( + check_clickhouse_schema_drift, + get_clickhouse_schema_drift, +) def test_get_clickhouse_schema_drift() -> None: diff --git a/posthog/tasks/test/test_email.py b/posthog/tasks/test/test_email.py index 9ef1f27907908..a728879586aad 100644 --- a/posthog/tasks/test/test_email.py +++ b/posthog/tasks/test/test_email.py @@ -28,7 +28,10 @@ def create_org_team_and_user(creation_date: str, email: str, ingested_event: boo org = Organization.objects.create(name="too_late_org") Team.objects.create(organization=org, name="Default Project", ingested_event=ingested_event) user = User.objects.create_and_join( - organization=org, email=email, password=None, level=OrganizationMembership.Level.OWNER + organization=org, + email=email, + password=None, + level=OrganizationMembership.Level.OWNER, ) return org, user @@ -47,7 +50,11 @@ def setUpTestData(cls) -> None: 
set_instance_setting("EMAIL_HOST", "fake_host") set_instance_setting("EMAIL_ENABLED", True) create_org_team_and_user("2022-01-01 00:00:00", "too_late_user@posthog.com") - create_org_team_and_user("2022-01-02 00:00:00", "ingested_event_in_range_user@posthog.com", ingested_event=True) + create_org_team_and_user( + "2022-01-02 00:00:00", + "ingested_event_in_range_user@posthog.com", + ingested_event=True, + ) create_org_team_and_user("2022-01-03 00:00:00", "too_early_user@posthog.com") def test_send_invite(self, MockEmailMessage: MagicMock) -> None: @@ -68,7 +75,10 @@ def test_send_member_join(self, MockEmailMessage: MagicMock) -> None: org, user = create_org_team_and_user("2022-01-02 00:00:00", "admin@posthog.com") user = User.objects.create_and_join( - organization=org, email="new-user@posthog.com", password=None, level=OrganizationMembership.Level.MEMBER + organization=org, + email="new-user@posthog.com", + password=None, + level=OrganizationMembership.Level.MEMBER, ) send_member_join(user.uuid, org.id) diff --git a/posthog/tasks/test/test_usage_report.py b/posthog/tasks/test/test_usage_report.py index ec758a24fd548..715c3829855d2 100644 --- a/posthog/tasks/test/test_usage_report.py +++ b/posthog/tasks/test/test_usage_report.py @@ -28,7 +28,9 @@ from posthog.models.plugin import PluginConfig from posthog.models.sharing_configuration import SharingConfiguration from posthog.schema import EventsQuery -from posthog.session_recordings.queries.test.session_replay_sql import produce_replay_summary +from posthog.session_recordings.queries.test.session_replay_sql import ( + produce_replay_summary, +) from posthog.tasks.usage_report import ( _get_all_org_reports, _get_all_usage_data_as_team_rows, @@ -109,7 +111,10 @@ def _create_sample_usage_data(self) -> None: created_by=self.user, ) SharingConfiguration.objects.create( - team=self.org_1_team_1, dashboard=dashboard, access_token="testtoken", enabled=True + team=self.org_1_team_1, + dashboard=dashboard, + 
access_token="testtoken", + enabled=True, ) FeatureFlag.objects.create( @@ -184,7 +189,10 @@ def _create_sample_usage_data(self) -> None: GroupTypeMapping.objects.create(team=self.org_1_team_1, group_type="organization", group_type_index=0) GroupTypeMapping.objects.create(team=self.org_1_team_1, group_type="company", group_type_index=1) create_group( - team_id=self.org_1_team_1.pk, group_type_index=0, group_key="org:5", properties={"industry": "finance"} + team_id=self.org_1_team_1.pk, + group_type_index=0, + group_key="org:5", + properties={"industry": "finance"}, ) create_group( team_id=self.org_1_team_1.pk, @@ -323,7 +331,10 @@ def _test_usage_report(self) -> List[dict]: period_start, period_end = period all_reports = _get_all_org_reports(period_start, period_end) report = _get_full_org_usage_report_as_dict( - _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period)) + _get_full_org_usage_report( + all_reports[str(self.organization.id)], + get_instance_metadata(period), + ) ) assert report["table_sizes"] @@ -349,7 +360,10 @@ def _test_usage_report(self) -> List[dict]: "users_who_signed_up": [], "users_who_signed_up_count": 0, "table_sizes": report["table_sizes"], - "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1}, + "plugins_installed": { + "Installed and enabled": 1, + "Installed but not enabled": 1, + }, "plugins_enabled": {"Installed and enabled": 1}, "instance_tag": "none", "event_count_lifetime": 55, @@ -480,7 +494,10 @@ def _test_usage_report(self) -> List[dict]: "users_who_signed_up": [], "users_who_signed_up_count": 0, "table_sizes": report["table_sizes"], - "plugins_installed": {"Installed and enabled": 1, "Installed but not enabled": 1}, + "plugins_installed": { + "Installed and enabled": 1, + "Installed but not enabled": 1, + }, "plugins_enabled": {"Installed and enabled": 1}, "instance_tag": "none", "event_count_lifetime": 11, @@ -571,7 +588,8 @@ def _test_usage_report(self) -> 
List[dict]: for expectation in expectations: report = _get_full_org_usage_report_as_dict( _get_full_org_usage_report( - all_reports[expectation["organization_id"]], get_instance_metadata(period) + all_reports[expectation["organization_id"]], + get_instance_metadata(period), ) ) assert report == expectation @@ -634,7 +652,11 @@ def test_usage_report_hogql_queries(self) -> None: sync_execute("SYSTEM FLUSH LOGS") sync_execute("TRUNCATE TABLE system.query_log") - execute_hogql_query(query="select * from events limit 200", team=self.team, query_type="HogQLQuery") + execute_hogql_query( + query="select * from events limit 200", + team=self.team, + query_type="HogQLQuery", + ) EventsQueryRunner(query=EventsQuery(select=["event"], limit=50), team=self.team).calculate() sync_execute("SYSTEM FLUSH LOGS") @@ -881,7 +903,10 @@ def test_usage_report_survey_responses(self, billing_task_mock: MagicMock, posth _create_event( distinct_id="3", event="survey sent", - properties={"$survey_id": "seeeep-o12-as124", "$survey_response": "correct"}, + properties={ + "$survey_id": "seeeep-o12-as124", + "$survey_response": "correct", + }, timestamp=now() - relativedelta(hours=i), team=self.analytics_team, ) @@ -890,7 +915,10 @@ def test_usage_report_survey_responses(self, billing_task_mock: MagicMock, posth _create_event( distinct_id="4", event="survey sent", - properties={"$survey_id": "see22eep-o12-as124", "$survey_response": "correct"}, + properties={ + "$survey_id": "see22eep-o12-as124", + "$survey_response": "correct", + }, timestamp=now() - relativedelta(hours=i), team=self.org_1_team_1, ) @@ -958,17 +986,42 @@ def setUp(self) -> None: self.team2 = Team.objects.create(organization=self.organization) - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, 
timestamp="2021-10-09T13:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) _create_event( event="$$internal_metrics_shouldnt_be_billed", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z", ) - _create_event(event="$pageview", team=self.team2, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=self.team2, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() TEST_clear_instance_license_cache() @@ -1041,7 +1094,10 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) -> period_start, period_end = period all_reports = _get_all_org_reports(period_start, period_end) full_report_as_dict = _get_full_org_usage_report_as_dict( - _get_full_org_usage_report(all_reports[str(self.organization.id)], get_instance_metadata(period)) + _get_full_org_usage_report( + all_reports[str(self.organization.id)], + get_instance_metadata(period), + ) ) send_all_org_usage_reports(dry_run=False) license = License.objects.first() @@ -1057,7 +1113,10 @@ def test_send_usage_cloud(self, mock_post: MagicMock, mock_client: MagicMock) -> self.user.distinct_id, "organization usage report", {**full_report_as_dict, "scope": "user"}, - groups={"instance": "http://localhost:8000", "organization": str(self.organization.id)}, + groups={ + "instance": "http://localhost:8000", + "organization": str(self.organization.id), + }, timestamp=None, ) @@ -1134,7 +1193,13 @@ def 
test_capture_event_called_with_string_timestamp(self, mock_client: MagicMock organization = Organization.objects.create() mock_posthog = MagicMock() mock_client.return_value = mock_posthog - capture_event(mock_client, "test event", organization.id, {"prop1": "val1"}, "2021-10-10T23:01:00.00Z") + capture_event( + mock_client, + "test event", + organization.id, + {"prop1": "val1"}, + "2021-10-10T23:01:00.00Z", + ) assert mock_client.capture.call_args[1]["timestamp"] == datetime(2021, 10, 10, 23, 1, tzinfo=tzutc()) @@ -1158,11 +1223,36 @@ class SendUsageNoLicenseTest(APIBaseTest): def test_no_license(self, mock_post: MagicMock, mock_client: MagicMock) -> None: TEST_clear_instance_license_cache() # Same test, we just don't include the LicensedTestMixin so no license - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-08T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T12:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T13:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-09T14:01:01Z") - _create_event(event="$pageview", team=self.team, distinct_id=1, timestamp="2021-10-10T14:01:01Z") + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-08T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T12:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T13:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-09T14:01:01Z", + ) + _create_event( + event="$pageview", + team=self.team, + distinct_id=1, + timestamp="2021-10-10T14:01:01Z", + ) flush_persons_and_events() diff --git a/posthog/tasks/usage_report.py b/posthog/tasks/usage_report.py index b150a75f88f12..a9a06ecbff7c5 100644 --- 
a/posthog/tasks/usage_report.py +++ b/posthog/tasks/usage_report.py @@ -41,7 +41,13 @@ from posthog.models.team.team import Team from posthog.models.utils import namedtuplefetchall from posthog.settings import CLICKHOUSE_CLUSTER, INSTANCE_TAG -from posthog.utils import get_helm_info_env, get_instance_realm, get_instance_region, get_machine_id, get_previous_day +from posthog.utils import ( + get_helm_info_env, + get_instance_realm, + get_instance_region, + get_machine_id, + get_previous_day, +) logger = structlog.get_logger(__name__) @@ -174,7 +180,10 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata metadata = InstanceMetadata( deployment_infrastructure=os.getenv("DEPLOYMENT", "unknown"), realm=realm, - period={"start_inclusive": period_start.isoformat(), "end_inclusive": period_end.isoformat()}, + period={ + "start_inclusive": period_start.isoformat(), + "end_inclusive": period_end.isoformat(), + }, site_url=settings.SITE_URL, product=get_product_name(realm, has_license), # Non-cloud vars @@ -197,7 +206,12 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata metadata.users_who_logged_in = [ {"id": user.id, "distinct_id": user.distinct_id} if user.anonymize_data - else {"id": user.id, "distinct_id": user.distinct_id, "first_name": user.first_name, "email": user.email} + else { + "id": user.id, + "distinct_id": user.distinct_id, + "first_name": user.first_name, + "email": user.email, + } for user in User.objects.filter(is_active=True, last_login__gte=period_start, last_login__lte=period_end) ] metadata.users_who_logged_in_count = len(metadata.users_who_logged_in) @@ -205,8 +219,17 @@ def get_instance_metadata(period: Tuple[datetime, datetime]) -> InstanceMetadata metadata.users_who_signed_up = [ {"id": user.id, "distinct_id": user.distinct_id} if user.anonymize_data - else {"id": user.id, "distinct_id": user.distinct_id, "first_name": user.first_name, "email": user.email} - for user in 
User.objects.filter(is_active=True, date_joined__gte=period_start, date_joined__lte=period_end) + else { + "id": user.id, + "distinct_id": user.distinct_id, + "first_name": user.first_name, + "email": user.email, + } + for user in User.objects.filter( + is_active=True, + date_joined__gte=period_start, + date_joined__lte=period_end, + ) ] metadata.users_who_signed_up_count = len(metadata.users_who_signed_up) @@ -243,7 +266,8 @@ def get_org_owner_or_first_user(organization_id: str) -> Optional[User]: user = membership.user else: capture_exception( - Exception("No user found for org while generating report"), {"org": {"organization_id": organization_id}} + Exception("No user found for org while generating report"), + {"org": {"organization_id": organization_id}}, ) return user @@ -288,7 +312,12 @@ def send_report_to_billing_service(org_id: str, report: Dict[str, Any]) -> None: logger.error(f"UsageReport failed sending to Billing for organization: {organization.id}: {err}") capture_exception(err) pha_client = Client("sTMFPsFhdP1Ssg") - capture_event(pha_client, f"organization usage report to billing service failure", org_id, {"err": str(err)}) + capture_event( + pha_client, + f"organization usage report to billing service failure", + org_id, + {"err": str(err)}, + ) raise err @@ -496,7 +525,12 @@ def get_teams_with_hogql_metric( AND access_method = %(access_method)s GROUP BY team_id """, - {"begin": begin, "end": end, "query_types": query_types, "access_method": access_method}, + { + "begin": begin, + "end": end, + "query_types": query_types, + "access_method": access_method, + }, workload=Workload.OFFLINE, settings=CH_BILLING_SETTINGS, ) @@ -559,7 +593,10 @@ def get_teams_with_survey_responses_count_in_period( @app.task(ignore_result=True, max_retries=0) def capture_report( - capture_event_name: str, org_id: str, full_report_dict: Dict[str, Any], at_date: Optional[datetime] = None + capture_event_name: str, + org_id: str, + full_report_dict: Dict[str, Any], + at_date: 
Optional[datetime] = None,
 ) -> None:
     pha_client = Client("sTMFPsFhdP1Ssg")
     try:
@@ -821,7 +858,10 @@ def _get_team_report(all_data: Dict[str, Any], team: Team) -> UsageReportCounter


 def _add_team_report_to_org_reports(
-    org_reports: Dict[str, OrgReport], team: Team, team_report: UsageReportCounters, period_start: datetime
+    org_reports: Dict[str, OrgReport],
+    team: Team,
+    team_report: UsageReportCounters,
+    period_start: datetime,
 ) -> None:
     org_id = str(team.organization.id)
     if org_id not in org_reports:
diff --git a/posthog/tasks/user_identify.py b/posthog/tasks/user_identify.py
index 9235410582eca..93dd0c851dbe8 100644
--- a/posthog/tasks/user_identify.py
+++ b/posthog/tasks/user_identify.py
@@ -6,6 +6,10 @@

 @app.task(ignore_result=True)
 def identify_task(user_id: int) -> None:
     user = User.objects.get(id=user_id)
-    posthoganalytics.capture(user.distinct_id, "update user properties", {"$set": user.get_analytics_metadata()})
+    posthoganalytics.capture(
+        user.distinct_id,
+        "update user properties",
+        {"$set": user.get_analytics_metadata()},
+    )
diff --git a/posthog/tasks/verify_persons_data_in_sync.py b/posthog/tasks/verify_persons_data_in_sync.py
index d5cf24d9ad220..8aea487d96279 100644
--- a/posthog/tasks/verify_persons_data_in_sync.py
+++ b/posthog/tasks/verify_persons_data_in_sync.py
@@ -53,7 +53,9 @@ def verify_persons_data_in_sync(
     max_pk = Person.objects.filter(created_at__lte=now() - period_start).latest("id").id
     person_data = list(
         Person.objects.filter(
-            pk__lte=max_pk, pk__gte=max_pk - LIMIT * 5, created_at__gte=now() - period_end
+            pk__lte=max_pk,
+            pk__gte=max_pk - LIMIT * 5,
+            created_at__gte=now() - period_end,
         ).values_list("id", "uuid", "team_id")[:limit]
     )
     person_data.sort(key=lambda row: row[2])  # keep persons from same team together
@@ -94,11 +96,15 @@ def _team_integrity_statistics(person_data: List[Any]) -> Counter:
     )

     ch_persons = _index_by(
-        sync_execute(GET_PERSON_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}),
lambda row: row[0] + sync_execute(GET_PERSON_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}), + lambda row: row[0], ) ch_distinct_ids_mapping = _index_by( - sync_execute(GET_DISTINCT_IDS_CH_QUERY, {"person_ids": person_uuids, "team_ids": team_ids}), + sync_execute( + GET_DISTINCT_IDS_CH_QUERY, + {"person_ids": person_uuids, "team_ids": team_ids}, + ), lambda row: row[1], flat=False, ) diff --git a/posthog/templates/head.html b/posthog/templates/head.html index ed0d359faa014..7ca827ae15914 100644 --- a/posthog/templates/head.html +++ b/posthog/templates/head.html @@ -36,6 +36,11 @@ window.SENTRY_ENVIRONMENT = '{{ sentry_environment | escapejs }}'; {% endif %} +{% if js_maplibre_style_url %} + +{% endif %}