diff --git a/.github/workflows/ci-backend.yml b/.github/workflows/ci-backend.yml
index ebc524c8aa509..70db671975d6a 100644
--- a/.github/workflows/ci-backend.yml
+++ b/.github/workflows/ci-backend.yml
@@ -125,11 +125,11 @@ jobs:
             - name: Check for syntax errors, import sort, and code style violations
               run: |
-                  ruff .
+                  ruff check .

             - name: Check formatting
               run: |
-                  black --exclude posthog/hogql/grammar --check --diff .
+                  ruff format --exclude posthog/hogql/grammar --check --diff .

             - name: Check static typing
               run: |
diff --git a/ee/api/explicit_team_member.py b/ee/api/explicit_team_member.py
index 723e317a86a55..edd5692453b69 100644
--- a/ee/api/explicit_team_member.py
+++ b/ee/api/explicit_team_member.py
@@ -48,7 +48,7 @@ def create(self, validated_data):
         user_uuid = validated_data.pop("user_uuid")
         validated_data["team"] = team
         try:
-            requesting_parent_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+            requesting_parent_membership: OrganizationMembership = OrganizationMembership.objects.get(
                 organization_id=team.organization_id,
                 user__uuid=user_uuid,
                 user__is_active=True,
diff --git a/ee/api/role.py b/ee/api/role.py
index 7114ef4f0110f..7e7c8714c537e 100644
--- a/ee/api/role.py
+++ b/ee/api/role.py
@@ -25,7 +25,7 @@ class RolePermissions(BasePermission):

     def has_permission(self, request, view):
         organization = request.user.organization
-        requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+        requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
             user_id=cast(User, request.user).id,
             organization=organization,
         )
diff --git a/ee/clickhouse/models/test/test_filters.py b/ee/clickhouse/models/test/test_filters.py
index f388e383f1c12..26ff79c565c4e 100644
--- a/ee/clickhouse/models/test/test_filters.py
+++ b/ee/clickhouse/models/test/test_filters.py
@@ -1219,13 +1219,16 @@ def test_person_cohort_properties(self):
         )
         query = """
         SELECT distinct_id FROM person_distinct_id2 WHERE team_id = %(team_id)s {prop_clause}
-        """.format(
-            prop_clause=prop_clause
-        )
+        """.format(prop_clause=prop_clause)
         # get distinct_id column of result
-        result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[
-            0
-        ][0]
+        result = sync_execute(
+            query,
+            {
+                "team_id": self.team.pk,
+                **prop_clause_params,
+                **filter.hogql_context.values,
+            },
+        )[0][0]
         self.assertEqual(result, person1_distinct_id)

         # test cohort2 with negation
@@ -1241,13 +1244,16 @@ def test_person_cohort_properties(self):
         )
         query = """
         SELECT distinct_id FROM person_distinct_id2 WHERE team_id = %(team_id)s {prop_clause}
-        """.format(
-            prop_clause=prop_clause
-        )
+        """.format(prop_clause=prop_clause)
         # get distinct_id column of result
-        result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[
-            0
-        ][0]
+        result = sync_execute(
+            query,
+            {
+                "team_id": self.team.pk,
+                **prop_clause_params,
+                **filter.hogql_context.values,
+            },
+        )[0][0]
         self.assertEqual(result, person2_distinct_id)
diff --git a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py
index 3b83344d502aa..6a0cfe3655103 100644
--- a/ee/clickhouse/queries/funnels/funnel_correlation_persons.py
+++ b/ee/clickhouse/queries/funnels/funnel_correlation_persons.py
@@ -52,7 +52,11 @@ def actor_query(self, limit_actors: Optional[bool] = True):

     def get_actors(
         self,
-    ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]:
+    ) -> Tuple[
+        Union[QuerySet[Person], QuerySet[Group]],
+        Union[List[SerializedGroup], List[SerializedPerson]],
+        int,
+    ]:
         if self._filter.correlation_type == FunnelCorrelationType.PROPERTIES:
             return _FunnelPropertyCorrelationActors(self._filter, self._team, self._base_uri).get_actors()
         else:
diff --git a/ee/models/role.py b/ee/models/role.py
index 61908ff83149e..5284972bd7cc1 100644
--- a/ee/models/role.py
+++ b/ee/models/role.py
@@ -12,7 +12,7 @@ class Role(UUIDModel):
         related_name="roles",
         related_query_name="role",
     )
-    feature_flags_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+    feature_flags_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
         default=OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT,
         choices=OrganizationResourceAccess.AccessLevel.choices,
     )
diff --git a/ee/session_recordings/session_recording_playlist.py b/ee/session_recordings/session_recording_playlist.py
index f2cfbbef4dc2f..7bc471be5e021 100644
--- a/ee/session_recordings/session_recording_playlist.py
+++ b/ee/session_recordings/session_recording_playlist.py
@@ -265,9 +265,7 @@ def modify_recordings(
             return response.Response({"success": True})

         if request.method == "DELETE":
-            playlist_item = SessionRecordingPlaylistItem.objects.get(
-                playlist=playlist, recording=session_recording_id
-            )  # type: ignore
+            playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id)  # type: ignore

             if playlist_item:
                 playlist_item.delete()
diff --git a/ee/tasks/test/test_calculate_cohort.py b/ee/tasks/test/test_calculate_cohort.py
index 24cb0a3783f4d..c5264bbe12631 100644
--- a/ee/tasks/test/test_calculate_cohort.py
+++ b/ee/tasks/test/test_calculate_cohort.py
@@ -12,9 +12,7 @@ from posthog.test.base import ClickhouseTestMixin, _create_event, _create_person


-class TestClickhouseCalculateCohort(
-    ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person)
-):  # type: ignore
+class TestClickhouseCalculateCohort(ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person)):  # type: ignore
     @patch("posthog.tasks.calculate_cohort.insert_cohort_from_insight_filter.delay")
     def test_create_stickiness_cohort(self, _insert_cohort_from_insight_filter):
         _create_person(team_id=self.team.pk, distinct_ids=["blabla"])
diff --git a/package.json b/package.json
index 8f6d3566f8c1b..81650666c1102 100644
--- a/package.json
+++ b/package.json
@@ -38,7 +38,7 @@
         "build:esbuild": "node frontend/build.mjs",
         "schema:build": "pnpm run schema:build:json && pnpm run schema:build:python",
         "schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 'frontend/src/queries/schema.ts' --no-type-check > frontend/src/queries/schema.json && prettier --write frontend/src/queries/schema.json",
-        "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && black posthog/schema.py",
+        "schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py",
         "grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp",
         "grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4",
         "grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4",
@@ -54,7 +54,7 @@
         "typegen:check": "kea-typegen check",
         "typegen:watch": "kea-typegen watch --delete --show-ts-errors",
         "typegen:clean": "find frontend/src -type f -name '*Type.ts' -delete",
-        "format:python": "black . && isort .",
+        "format:python": "ruff --exclude posthog/hogql/grammar .",
         "format:js": "pnpm prettier && pnpm eslint --fix",
         "format": "pnpm format:python && pnpm format:js",
         "storybook": "storybook dev -p 6006",
@@ -305,8 +305,8 @@
             "pnpm --dir plugin-server exec prettier --write"
         ],
         "!(posthog/hogql/grammar/*)*.{py,pyi}": [
-            "black",
-            "ruff"
+            "ruff format",
+            "ruff check"
         ],
         "!(HogQL*)*.{c,cpp,h,hpp}": "clang-format -i"
     },
diff --git a/posthog/api/feature_flag.py b/posthog/api/feature_flag.py
index 6e61251818f48..1c17722525b98 100644
--- a/posthog/api/feature_flag.py
+++ b/posthog/api/feature_flag.py
@@ -85,10 +85,10 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
     is_simple_flag = serializers.SerializerMethodField()
     rollout_percentage = serializers.SerializerMethodField()

-    experiment_set: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+    experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
     surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
     features: serializers.SerializerMethodField = serializers.SerializerMethodField()
-    usage_dashboard: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(read_only=True)
+    usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
     analytics_dashboards = serializers.PrimaryKeyRelatedField(
         many=True,
         required=False,
diff --git a/posthog/api/notebook.py b/posthog/api/notebook.py
index 103261dca90e6..4781523e059f2 100644
--- a/posthog/api/notebook.py
+++ b/posthog/api/notebook.py
@@ -273,8 +273,7 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query
                 queryset = queryset.filter(
                     # some notebooks have no text_content until next saved, so we need to check the title too
                     # TODO this can be removed once all/most notebooks have text_content
-                    Q(title__search=request.GET["search"])
-                    | Q(text_content__search=request.GET["search"])
+                    Q(title__search=request.GET["search"]) | Q(text_content__search=request.GET["search"])
                 )
             elif key == "contains":
                 contains = request.GET["contains"]
diff --git a/posthog/api/organization_member.py b/posthog/api/organization_member.py
index d6d90c1280d06..11f3c08e30411 100644
--- a/posthog/api/organization_member.py
+++ b/posthog/api/organization_member.py
@@ -26,7 +26,7 @@ def has_object_permission(self, request: Request, view, membership: Organization
         if request.method in SAFE_METHODS:
             return True
         organization = extract_organization(membership)
-        requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+        requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
             user_id=cast(User, request.user).id,
             organization=organization,
         )
@@ -66,7 +66,7 @@ def get_has_social_auth(self, instance: OrganizationMembership) -> bool:
     def update(self, updated_membership, validated_data, **kwargs):
         updated_membership = cast(OrganizationMembership, updated_membership)
         raise_errors_on_nested_writes("update", self, validated_data)
-        requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+        requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
             organization=updated_membership.organization,
             user=self.context["request"].user,
         )
diff --git a/posthog/api/signup.py b/posthog/api/signup.py
index 511f425b71000..5f866d2efda59 100644
--- a/posthog/api/signup.py
+++ b/posthog/api/signup.py
@@ -502,7 +502,9 @@ def social_create_user(
             user=user.id if user else None,
         )
         if user:
-            backend_processor = "domain_whitelist"  # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
+            backend_processor = (
+                "domain_whitelist"
+            )  # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
             from_invite = True  # jit_provisioning means they're definitely not organization_first_user

     if not user:
diff --git a/posthog/api/team.py b/posthog/api/team.py
index 2c076a6a8d8b3..f829dd23700c2 100644
--- a/posthog/api/team.py
+++ b/posthog/api/team.py
@@ -204,7 +204,7 @@ def validate(self, attrs: Any) -> Any:
             organization_id = self.instance.organization_id
         else:
             organization_id = self.context["view"].organization
-        org_membership: (OrganizationMembership) = OrganizationMembership.objects.only("level").get(
+        org_membership: OrganizationMembership = OrganizationMembership.objects.only("level").get(
             organization_id=organization_id, user=request.user
         )
         if org_membership.level < OrganizationMembership.Level.ADMIN:
diff --git a/posthog/api/test/test_event.py b/posthog/api/test/test_event.py
index dfd24b6589278..3b0c8a11d471b 100644
--- a/posthog/api/test/test_event.py
+++ b/posthog/api/test/test_event.py
@@ -446,9 +446,10 @@ def test_pagination(self):
             from posthog.client import sync_execute

             self.assertEqual(
-                sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[
-                    0
-                ][0],
+                sync_execute(
+                    "select count(*) from events where team_id = %(team_id)s",
+                    {"team_id": self.team.pk},
+                )[0][0],
                 250,
             )

@@ -495,9 +496,10 @@ def test_pagination_bounded_date_range(self):
             from posthog.client import sync_execute

             self.assertEqual(
-                sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[
-                    0
-                ][0],
+                sync_execute(
+                    "select count(*) from events where team_id = %(team_id)s",
+                    {"team_id": self.team.pk},
+                )[0][0],
                 25,
             )
diff --git a/posthog/api/test/test_person.py b/posthog/api/test/test_person.py
index 549e13177f26c..815f38c472978 100644
--- a/posthog/api/test/test_person.py
+++ b/posthog/api/test/test_person.py
@@ -339,9 +339,10 @@ def test_delete_person(self):
         self.assertEqual([(100, 1, "{}")], ch_persons)
         # No async deletion is scheduled
         self.assertEqual(AsyncDeletion.objects.filter(team_id=self.team.id).count(), 0)
-        ch_events = sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
-            0
-        ][0]
+        ch_events = sync_execute(
+            "SELECT count() FROM events WHERE team_id = %(team_id)s",
+            {"team_id": self.team.pk},
+        )[0][0]
         self.assertEqual(ch_events, 3)

     @freeze_time("2021-08-25T22:09:14.252Z")
diff --git a/posthog/clickhouse/client/execute.py b/posthog/clickhouse/client/execute.py
index 5f039c78c19f9..b588badfc07ea 100644
--- a/posthog/clickhouse/client/execute.py
+++ b/posthog/clickhouse/client/execute.py
@@ -84,7 +84,7 @@ def sync_execute(
             from posthog.test.base import flush_persons_and_events

             flush_persons_and_events()
-        except (ModuleNotFoundError):  # when we run plugin server tests it tries to run above, ignore
+        except ModuleNotFoundError:  # when we run plugin server tests it tries to run above, ignore
             pass

     with get_pool(workload, team_id, readonly).get_client() as client:
diff --git a/posthog/clickhouse/migrations/0006_session_recording_events.py b/posthog/clickhouse/migrations/0006_session_recording_events.py
index 5f9f1a8212261..732d189d45545 100644
--- a/posthog/clickhouse/migrations/0006_session_recording_events.py
+++ b/posthog/clickhouse/migrations/0006_session_recording_events.py
@@ -8,12 +8,12 @@
 )
 from posthog.settings.data_stores import CLICKHOUSE_CLUSTER

-SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = lambda: """
+SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = (
+    lambda: """
     ALTER TABLE session_recording_events
     ON CLUSTER '{cluster}'
    COMMENT COLUMN has_full_snapshot 'column_materializer::has_full_snapshot'
-""".format(
-    cluster=CLICKHOUSE_CLUSTER
+""".format(cluster=CLICKHOUSE_CLUSTER)
 )

 operations = [
diff --git a/posthog/clickhouse/migrations/0021_session_recording_events_materialize_full_snapshot.py b/posthog/clickhouse/migrations/0021_session_recording_events_materialize_full_snapshot.py
index af680a54d9941..379970976354c 100644
--- a/posthog/clickhouse/migrations/0021_session_recording_events_materialize_full_snapshot.py
+++ b/posthog/clickhouse/migrations/0021_session_recording_events_materialize_full_snapshot.py
@@ -3,12 +3,12 @@
 from posthog.client import sync_execute
 from posthog.settings import CLICKHOUSE_CLUSTER

-SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = lambda: """
+SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = (
+    lambda: """
     ALTER TABLE session_recording_events
     ON CLUSTER '{cluster}'
     COMMENT COLUMN has_full_snapshot 'column_materializer::has_full_snapshot'
-""".format(
-    cluster=CLICKHOUSE_CLUSTER
+""".format(cluster=CLICKHOUSE_CLUSTER)
 )
diff --git a/posthog/demo/test/test_matrix_manager.py b/posthog/demo/test/test_matrix_manager.py
index 25770553ab613..edc015c7b5aa9 100644
--- a/posthog/demo/test/test_matrix_manager.py
+++ b/posthog/demo/test/test_matrix_manager.py
@@ -85,9 +85,10 @@ def test_run_on_team(self):
         # At least one event for each cluster
         assert (
-            sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
-                0
-            ][0]
+            sync_execute(
+                "SELECT count() FROM events WHERE team_id = %(team_id)s",
+                {"team_id": self.team.pk},
+            )[0][0]
             >= 3
         )
         assert self.team.name == DummyMatrix.PRODUCT_NAME
@@ -100,8 +101,9 @@ def test_run_on_team_using_pre_save(self):
         # At least one event for each cluster
         assert sync_execute("SELECT count() FROM events WHERE team_id = 0")[0][0] >= 3
         assert (
-            sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
-                0
-            ][0]
+            sync_execute(
+                "SELECT count() FROM events WHERE team_id = %(team_id)s",
+                {"team_id": self.team.pk},
+            )[0][0]
             >= 3
         )
diff --git a/posthog/hogql/database/database.py b/posthog/hogql/database/database.py
index db2791c348d76..59580ccc6d8e7 100644
--- a/posthog/hogql/database/database.py
+++ b/posthog/hogql/database/database.py
@@ -61,10 +61,10 @@ class Database(BaseModel):
     cohort_people: CohortPeople = CohortPeople()
     static_cohort_people: StaticCohortPeople = StaticCohortPeople()
     log_entries: LogEntriesTable = LogEntriesTable()
-    console_logs_log_entries: (ReplayConsoleLogsLogEntriesTable) = ReplayConsoleLogsLogEntriesTable()
+    console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable()
     batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable()
-    raw_session_replay_events: (RawSessionReplayEventsTable) = RawSessionReplayEventsTable()
+    raw_session_replay_events: RawSessionReplayEventsTable = RawSessionReplayEventsTable()
     raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable()
     raw_persons: RawPersonsTable = RawPersonsTable()
     raw_groups: RawGroupsTable = RawGroupsTable()
diff --git a/posthog/hogql_queries/sessions_timeline_query_runner.py b/posthog/hogql_queries/sessions_timeline_query_runner.py
index 7d9d1d29a1646..abea2867e2b90 100644
--- a/posthog/hogql_queries/sessions_timeline_query_runner.py
+++ b/posthog/hogql_queries/sessions_timeline_query_runner.py
@@ -160,9 +160,7 @@ def calculate(self) -> SessionsTimelineQueryResponse:
             formal_session_id,
             informal_session_id,
             recording_duration_s,
-        ) in reversed(
-            query_result.results[: self.EVENT_LIMIT]
-        ):  # The last result is a marker of more results
+        ) in reversed(query_result.results[: self.EVENT_LIMIT]):  # The last result is a marker of more results
             entry_id = str(formal_session_id or informal_session_id)
             if entry_id not in timeline_entries_map:
                 timeline_entries_map[entry_id] = TimelineEntry(
diff --git a/posthog/models/async_migration.py b/posthog/models/async_migration.py
index ab60eed94d0c5..885f7ce397931 100644
--- a/posthog/models/async_migration.py
+++ b/posthog/models/async_migration.py
@@ -33,7 +33,7 @@ class Meta:
         null=False, blank=False, default=MigrationStatus.NotStarted
     )

-    current_operation_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+    current_operation_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
         null=False, blank=False, default=0
     )
     current_query_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="")
diff --git a/posthog/models/cohort/sql.py b/posthog/models/cohort/sql.py
index 821e84e29fd37..d56e5507f9571 100644
--- a/posthog/models/cohort/sql.py
+++ b/posthog/models/cohort/sql.py
@@ -7,7 +7,8 @@
 """

 COHORTPEOPLE_TABLE_ENGINE = lambda: CollapsingMergeTree("cohortpeople", ver="sign")
-CREATE_COHORTPEOPLE_TABLE_SQL = lambda: """
+CREATE_COHORTPEOPLE_TABLE_SQL = (
+    lambda: """
 CREATE TABLE IF NOT EXISTS cohortpeople ON CLUSTER '{cluster}'
 (
     person_id UUID,
@@ -19,9 +20,10 @@
 Order By (team_id, cohort_id, person_id, version)
 {storage_policy}
 """.format(
-    cluster=CLICKHOUSE_CLUSTER,
-    engine=COHORTPEOPLE_TABLE_ENGINE(),
-    storage_policy="",
+        cluster=CLICKHOUSE_CLUSTER,
+        engine=COHORTPEOPLE_TABLE_ENGINE(),
+        storage_policy="",
+    )
 )

 TRUNCATE_COHORTPEOPLE_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS cohortpeople ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
diff --git a/posthog/models/dashboard.py b/posthog/models/dashboard.py
index f20fc9fdcb0f2..86af344be038e 100644
--- a/posthog/models/dashboard.py
+++ b/posthog/models/dashboard.py
@@ -52,7 +52,7 @@ class PrivilegeLevel(models.IntegerChoices):
     last_accessed_at: models.DateTimeField = models.DateTimeField(blank=True, null=True)
     filters: models.JSONField = models.JSONField(default=dict)
     creation_mode: models.CharField = models.CharField(max_length=16, default="default", choices=CreationMode.choices)
-    restriction_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+    restriction_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
         default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT,
         choices=RestrictionLevel.choices,
     )
diff --git a/posthog/models/event/sql.py b/posthog/models/event/sql.py
index 871fec438a847..410904ba006d4 100644
--- a/posthog/models/event/sql.py
+++ b/posthog/models/event/sql.py
@@ -124,7 +124,8 @@
     indexes="",
 )

-EVENTS_TABLE_JSON_MV_SQL = lambda: """
+EVENTS_TABLE_JSON_MV_SQL = (
+    lambda: """
 CREATE MATERIALIZED VIEW IF NOT EXISTS events_json_mv ON CLUSTER '{cluster}'
 TO {database}.{target_table}
 AS SELECT
@@ -154,9 +155,10 @@
     _offset
 FROM {database}.kafka_events_json
 """.format(
-    target_table=WRITABLE_EVENTS_DATA_TABLE(),
-    cluster=settings.CLICKHOUSE_CLUSTER,
-    database=settings.CLICKHOUSE_DATABASE,
+        target_table=WRITABLE_EVENTS_DATA_TABLE(),
+        cluster=settings.CLICKHOUSE_CLUSTER,
+        database=settings.CLICKHOUSE_DATABASE,
+    )
 )

 # Distributed engine tables are only created if CLICKHOUSE_REPLICATED
@@ -387,9 +389,7 @@
 GROUP BY tag_name, elements_chain
 ORDER BY tag_count desc, tag_name
 LIMIT %(limit)s
-""".format(
-    tag_regex=EXTRACT_TAG_REGEX, text_regex=EXTRACT_TEXT_REGEX
-)
+""".format(tag_regex=EXTRACT_TAG_REGEX, text_regex=EXTRACT_TEXT_REGEX)

 GET_CUSTOM_EVENTS = """
 SELECT DISTINCT event FROM events where team_id = %(team_id)s AND event NOT IN ['$autocapture', '$pageview', '$identify', '$pageleave', '$screen']
diff --git a/posthog/models/event/util.py b/posthog/models/event/util.py
index 2cd36b34e1dd2..9fe98305d693a 100644
--- a/posthog/models/event/util.py
+++ b/posthog/models/event/util.py
@@ -156,9 +156,7 @@ def bulk_create_events(events: List[Dict[str, Any]], person_mapping: Optional[Di
                 %(created_at_{i})s,
                 now(),
                 0
-            )""".format(
-                i=index
-            )
+            )""".format(i=index)
         )

         # use person properties mapping to populate person properties in given event
diff --git a/posthog/models/feature_flag/flag_matching.py b/posthog/models/feature_flag/flag_matching.py
index 41da1ee13ce7d..fe57b171204e8 100644
--- a/posthog/models/feature_flag/flag_matching.py
+++ b/posthog/models/feature_flag/flag_matching.py
@@ -198,7 +198,10 @@ def get_match(self, feature_flag: FeatureFlag) -> FeatureFlagMatch:
                     payload=payload,
                 )

-            (highest_priority_evaluation_reason, highest_priority_index,) = self.get_highest_priority_match_evaluation(
+            (
+                highest_priority_evaluation_reason,
+                highest_priority_index,
+            ) = self.get_highest_priority_match_evaluation(
                 highest_priority_evaluation_reason,
                 highest_priority_index,
                 evaluation_reason,
diff --git a/posthog/models/filters/mixins/simplify.py b/posthog/models/filters/mixins/simplify.py
index 4735a95e6a7d6..3b1e0eb426ba1 100644
--- a/posthog/models/filters/mixins/simplify.py
+++ b/posthog/models/filters/mixins/simplify.py
@@ -39,7 +39,8 @@ def simplify(self: T, team: "Team", **kwargs) -> T:
         if hasattr(result, "entities_to_dict"):
             for entity_type, entities in result.entities_to_dict().items():
                 updated_entities[entity_type] = [
-                    self._simplify_entity(team, entity_type, entity, **kwargs) for entity in entities  # type: ignore
+                    self._simplify_entity(team, entity_type, entity, **kwargs)  # type: ignore
+                    for entity in entities
                 ]

         from posthog.models.property.util import clear_excess_levels
diff --git a/posthog/models/filters/test/test_filter.py b/posthog/models/filters/test/test_filter.py
index d7f60b149b93b..84b70bbd4d837 100644
--- a/posthog/models/filters/test/test_filter.py
+++ b/posthog/models/filters/test/test_filter.py
@@ -616,9 +616,7 @@ def _filter_persons(filter: Filter, team: Team):
     return [str(uuid) for uuid in persons.values_list("uuid", flat=True)]


-class TestDjangoPropertiesToQ(
-    property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest
-):  # type: ignore
+class TestDjangoPropertiesToQ(property_to_Q_test_factory(_filter_persons, _create_person), QueryMatchingTest):  # type: ignore
     @snapshot_postgres_queries
     def test_array_property_as_string_on_persons(self):
         Person.objects.create(
diff --git a/posthog/models/ingestion_warnings/sql.py b/posthog/models/ingestion_warnings/sql.py
index 6f3023744f51f..31917ffeb5499 100644
--- a/posthog/models/ingestion_warnings/sql.py
+++ b/posthog/models/ingestion_warnings/sql.py
@@ -44,7 +44,8 @@
     extra_fields="",
 )

-INGESTION_WARNINGS_MV_TABLE_SQL = lambda: """
+INGESTION_WARNINGS_MV_TABLE_SQL = (
+    lambda: """
 CREATE MATERIALIZED VIEW IF NOT EXISTS ingestion_warnings_mv ON CLUSTER '{cluster}'
 TO {database}.{target_table}
 AS SELECT
@@ -58,9 +59,10 @@
     _partition
 FROM {database}.kafka_ingestion_warnings
 """.format(
-    target_table="ingestion_warnings",
-    cluster=settings.CLICKHOUSE_CLUSTER,
-    database=settings.CLICKHOUSE_DATABASE,
+        target_table="ingestion_warnings",
+        cluster=settings.CLICKHOUSE_CLUSTER,
+        database=settings.CLICKHOUSE_DATABASE,
+    )
 )

 # This table is responsible for writing to sharded_ingestion_warnings based on a sharding key.
diff --git a/posthog/models/organization.py b/posthog/models/organization.py
index 700fea47658f1..4e1c7af79838c 100644
--- a/posthog/models/organization.py
+++ b/posthog/models/organization.py
@@ -122,7 +122,7 @@ class PluginsAccessLevel(models.IntegerChoices):
     slug: LowercaseSlugField = LowercaseSlugField(unique=True, max_length=MAX_SLUG_LENGTH)
     created_at: models.DateTimeField = models.DateTimeField(auto_now_add=True)
     updated_at: models.DateTimeField = models.DateTimeField(auto_now=True)
-    plugins_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+    plugins_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
         default=PluginsAccessLevel.CONFIG,
         choices=PluginsAccessLevel.choices,
     )
diff --git a/posthog/models/performance/sql.py b/posthog/models/performance/sql.py
index 31914e858b9b9..4c6a97f34a615 100644
--- a/posthog/models/performance/sql.py
+++ b/posthog/models/performance/sql.py
@@ -92,9 +92,7 @@
     navigation_type LowCardinality(String),
     unload_event_end Float64,
     unload_event_start Float64,
-""".strip().rstrip(
-    ","
-)
+""".strip().rstrip(",")

 PERFORMANCE_EVENT_TABLE_ENGINE = lambda: MergeTreeEngine(
     "performance_events", replication_scheme=ReplicationScheme.SHARDED
@@ -179,7 +177,8 @@ def _column_names_from_column_definitions(column_definitions: str) -> str:
     extra_fields=KAFKA_COLUMNS_WITH_PARTITION,
 )

-PERFORMANCE_EVENTS_TABLE_MV_SQL = lambda: """
+PERFORMANCE_EVENTS_TABLE_MV_SQL = (
+    lambda: """
 CREATE MATERIALIZED VIEW IF NOT EXISTS performance_events_mv ON CLUSTER '{cluster}'
 TO {database}.{target_table}
 AS SELECT
@@ -187,11 +186,12 @@ def _column_names_from_column_definitions(column_definitions: str) -> str:
 {columns}
 ,{extra_fields}
 FROM {database}.kafka_performance_events
 """.format(
-    columns=_column_names_from_column_definitions(PERFORMANCE_EVENT_COLUMNS),
-    target_table="writeable_performance_events",
-    cluster=settings.CLICKHOUSE_CLUSTER,
-    database=settings.CLICKHOUSE_DATABASE,
-    extra_fields=_column_names_from_column_definitions(KAFKA_COLUMNS_WITH_PARTITION),
+        columns=_column_names_from_column_definitions(PERFORMANCE_EVENT_COLUMNS),
+        target_table="writeable_performance_events",
+        cluster=settings.CLICKHOUSE_CLUSTER,
+        database=settings.CLICKHOUSE_DATABASE,
+        extra_fields=_column_names_from_column_definitions(KAFKA_COLUMNS_WITH_PARTITION),
+    )
 )

 # TODO this should probably be a materialized view
diff --git a/posthog/models/person/sql.py b/posthog/models/person/sql.py
index ffb80869b9e9a..bf18c894f865c 100644
--- a/posthog/models/person/sql.py
+++ b/posthog/models/person/sql.py
@@ -74,9 +74,7 @@
     _timestamp,
     _offset
 FROM {database}.kafka_{table_name}
-""".format(
-    table_name=PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE
-)
+""".format(table_name=PERSONS_TABLE, cluster=CLICKHOUSE_CLUSTER, database=CLICKHOUSE_DATABASE)

 GET_LATEST_PERSON_SQL = """
 SELECT * FROM person JOIN (
@@ -94,9 +92,7 @@
     (select id from (
         {latest_person_sql}
     ))
-""".format(
-    latest_person_sql=GET_LATEST_PERSON_SQL
-)
+""".format(latest_person_sql=GET_LATEST_PERSON_SQL)

 #
 # person_distinct_id - legacy table for person distinct IDs, do not use
@@ -132,7 +128,8 @@

 # :KLUDGE: We default is_deleted to 0 for backwards compatibility for when we drop `is_deleted` from message schema.
 #          Can't make DEFAULT if(_sign==-1, 1, 0) because Cyclic aliases error.
-KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL = lambda: """
+KAFKA_PERSONS_DISTINCT_ID_TABLE_SQL = (
+    lambda: """
 CREATE TABLE IF NOT EXISTS {table_name} ON CLUSTER '{cluster}'
 (
     distinct_id VARCHAR,
@@ -142,9 +139,10 @@
     is_deleted Nullable(Int8)
 ) ENGINE = {engine}
 """.format(
-    table_name="kafka_" + PERSONS_DISTINCT_ID_TABLE,
-    cluster=CLICKHOUSE_CLUSTER,
-    engine=kafka_engine(KAFKA_PERSON_UNIQUE_ID),
+        table_name="kafka_" + PERSONS_DISTINCT_ID_TABLE,
+        cluster=CLICKHOUSE_CLUSTER,
+        engine=kafka_engine(KAFKA_PERSON_UNIQUE_ID),
+    )
 )

 # You must include the database here because of a bug in clickhouse
diff --git a/posthog/models/property/util.py b/posthog/models/property/util.py
index b353eb11bb141..5e25567178035 100644
--- a/posthog/models/property/util.py
+++ b/posthog/models/property/util.py
@@ -399,9 +399,7 @@ def negate_operator(operator: OperatorType) -> OperatorType:
         "is_date_before": "is_date_after",
         "is_date_after": "is_date_before",
         # is_date_exact not yet supported
-    }.get(
-        operator, operator
-    )  # type: ignore
+    }.get(operator, operator)  # type: ignore


 def prop_filter_json_extract(
diff --git a/posthog/models/property_definition.py b/posthog/models/property_definition.py
index 7747a17c71820..2efc8f203192d 100644
--- a/posthog/models/property_definition.py
+++ b/posthog/models/property_definition.py
@@ -51,7 +51,7 @@ class Type(models.IntegerChoices):
     # :TRICKY: May be null for historical events
     type: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(default=Type.EVENT, choices=Type.choices)
     # Only populated for `Type.GROUP`
-    group_type_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(null=True)
+    group_type_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(null=True)

     # DEPRECATED
     property_type_format = models.CharField(
diff --git a/posthog/models/team/team.py b/posthog/models/team/team.py
index bc458807b56a4..0993a7cdd5149 100644
--- a/posthog/models/team/team.py
+++ b/posthog/models/team/team.py
@@ -175,7 +175,7 @@ class Team(UUIDClassicModel):
         decimal_places=2,
         validators=[MinValueValidator(Decimal(0)), MaxValueValidator(Decimal(1))],
     )
-    session_recording_minimum_duration_milliseconds: (models.IntegerField) = models.IntegerField(
+    session_recording_minimum_duration_milliseconds: models.IntegerField = models.IntegerField(
         null=True,
         blank=True,
         validators=[MinValueValidator(0), MaxValueValidator(15000)],
diff --git a/posthog/queries/actor_base_query.py b/posthog/queries/actor_base_query.py
index 396c216f9c01d..75ff6ab0a97e9 100644
--- a/posthog/queries/actor_base_query.py
+++ b/posthog/queries/actor_base_query.py
@@ -96,7 +96,11 @@ def is_aggregating_by_groups(self) -> bool:

     def get_actors(
         self,
-    ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]:
+    ) -> Tuple[
+        Union[QuerySet[Person], QuerySet[Group]],
+        Union[List[SerializedGroup], List[SerializedPerson]],
+        int,
+    ]:
         """Get actors in data model and dict formats. Builds query and executes"""
         self._filter.team = self._team
         query, params = self.actor_query()
@@ -217,7 +221,10 @@ def add_matched_recordings_to_serialized_actors(

     def get_actors_from_result(
         self, raw_result
-    ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]],]:
+    ) -> Tuple[
+        Union[QuerySet[Person], QuerySet[Group]],
+        Union[List[SerializedGroup], List[SerializedPerson]],
+    ]:
         actors: Union[QuerySet[Person], QuerySet[Group]]
         serialized_actors: Union[List[SerializedGroup], List[SerializedPerson]]
diff --git a/posthog/queries/person_query.py b/posthog/queries/person_query.py
index 73a779e5aca6a..cffcce890c80b 100644
--- a/posthog/queries/person_query.py
+++ b/posthog/queries/person_query.py
@@ -341,7 +341,10 @@ def _get_search_clauses(self, prepend: str = "") -> Tuple[str, str, Dict]:
             )
             finalization_sql = f"AND ({finalization_conditions_sql} OR {id_conditions_sql})"

-            (prefiltering_conditions_sql, prefiltering_params,) = parse_prop_grouped_clauses(
+            (
+                prefiltering_conditions_sql,
+                prefiltering_params,
+            ) = parse_prop_grouped_clauses(
                 team_id=self._team_id,
                 property_group=prop_group,
                 prepend=f"search_pre_{prepend}",
diff --git a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
index f70f86fdba3cf..b3bc8bf1c03f8 100644
--- a/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
+++ b/posthog/session_recordings/queries/test/test_session_recording_list_from_session_replay.py
@@ -2077,7 +2077,10 @@ def test_any_event_filter_with_properties(self):
         session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
         (session_recordings, _) = session_recording_list_instance.run()

-        assert sorted([sr["session_id"] for sr in session_recordings], key=lambda x: x[0],) == [
+        assert sorted(
+            [sr["session_id"] for sr in session_recordings],
+            key=lambda x: x[0],
+        ) == [
             my_custom_event_session_id,
             non_matching__event_session_id,
             page_view_session_id,
@@ -2108,7 +2111,10 @@ def test_any_event_filter_with_properties(self):
         session_recording_list_instance = SessionRecordingListFromReplaySummary(filter=filter, team=self.team)
         (session_recordings, _) = session_recording_list_instance.run()

-        assert sorted([sr["session_id"] for sr in session_recordings], key=lambda x: x[0],) == [
+        assert sorted(
+            [sr["session_id"] for sr in session_recordings],
+            key=lambda x: x[0],
+        ) == [
             my_custom_event_session_id,
             page_view_session_id,
         ]
diff --git a/posthog/session_recordings/sql/session_recording_event_sql.py b/posthog/session_recordings/sql/session_recording_event_sql.py
index fc52f27fbdae8..7f87898293b21 100644
--- a/posthog/session_recordings/sql/session_recording_event_sql.py
+++ b/posthog/session_recordings/sql/session_recording_event_sql.py
@@ -107,7 +107,8 @@
     extra_fields="",
 )

-SESSION_RECORDING_EVENTS_TABLE_MV_SQL = lambda: """
+SESSION_RECORDING_EVENTS_TABLE_MV_SQL = (
+    lambda: """
 CREATE MATERIALIZED VIEW IF NOT EXISTS session_recording_events_mv ON CLUSTER '{cluster}'
 TO {database}.{target_table}
 AS SELECT
@@ -123,9 +124,10 @@
     _offset
 FROM {database}.kafka_session_recording_events
 """.format(
-    target_table="writable_session_recording_events",
-    cluster=settings.CLICKHOUSE_CLUSTER,
-    database=settings.CLICKHOUSE_DATABASE,
+        target_table="writable_session_recording_events",
+        cluster=settings.CLICKHOUSE_CLUSTER,
+        database=settings.CLICKHOUSE_DATABASE,
+    )
 )
diff --git a/posthog/session_recordings/sql/session_replay_event_sql.py b/posthog/session_recordings/sql/session_replay_event_sql.py
index e7c2576e93f66..9dd346b32f02c 100644
--- a/posthog/session_recordings/sql/session_replay_event_sql.py
+++ b/posthog/session_recordings/sql/session_replay_event_sql.py
@@ -105,7 +105,8 @@
 )


-SESSION_REPLAY_EVENTS_TABLE_MV_SQL = lambda: """
+SESSION_REPLAY_EVENTS_TABLE_MV_SQL = (
+    lambda: """
 CREATE MATERIALIZED VIEW IF NOT EXISTS session_replay_events_mv ON CLUSTER '{cluster}'
 TO {database}.{target_table}
 AS SELECT
@@ -139,9 +140,10 @@
 FROM {database}.kafka_session_replay_events
 group by session_id, team_id
 """.format(
-    target_table="writable_session_replay_events",
-    cluster=settings.CLICKHOUSE_CLUSTER,
-    database=settings.CLICKHOUSE_DATABASE,
+        target_table="writable_session_replay_events",
+        cluster=settings.CLICKHOUSE_CLUSTER,
+        database=settings.CLICKHOUSE_DATABASE,
+    )
 )
diff --git a/posthog/session_recordings/test/test_session_recordings.py b/posthog/session_recordings/test/test_session_recordings.py
index 3dca9b46b9fb2..8bc9e92e90315 100644
--- a/posthog/session_recordings/test/test_session_recordings.py
+++ b/posthog/session_recordings/test/test_session_recordings.py
@@ -186,7 +186,7 @@ def test_console_log_filters_are_correctly_passed_to_listing(self, mock_summary_
         self.client.get(f'/api/projects/{self.team.id}/session_recordings?console_logs=["warn", "error"]')

         assert len(mock_summary_lister.call_args_list) == 1
-        filter_passed_to_mock: (SessionRecordingsFilter) = mock_summary_lister.call_args_list[0].kwargs["filter"]
+        filter_passed_to_mock: SessionRecordingsFilter = mock_summary_lister.call_args_list[0].kwargs["filter"]
         assert filter_passed_to_mock.console_logs_filter == ["warn", "error"]

     @snapshot_postgres_queries
diff --git a/posthog/settings/base_variables.py b/posthog/settings/base_variables.py
index 85831394fc501..fa63b80da1789 100644
--- a/posthog/settings/base_variables.py
+++ b/posthog/settings/base_variables.py
@@ -11,9 +11,7 @@
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 DEBUG = get_from_env("DEBUG", False, type_cast=str_to_bool)
-TEST = (
-    "test" in sys.argv or sys.argv[0].endswith("pytest") or get_from_env("TEST", False, type_cast=str_to_bool)
-)  # type: bool
+TEST = "test" in sys.argv or sys.argv[0].endswith("pytest") or get_from_env("TEST", False, type_cast=str_to_bool)  # type: bool
 DEMO = get_from_env("DEMO", False, type_cast=str_to_bool)  # Whether this is a managed demo environment
 REGION = get_from_env("REGION", "US")  # Whether this is a Cloud US or Cloud EU instance
 SELF_CAPTURE = get_from_env("SELF_CAPTURE", DEBUG and not DEMO, type_cast=str_to_bool)
diff --git a/posthog/tasks/check_clickhouse_schema_drift.py b/posthog/tasks/check_clickhouse_schema_drift.py
index bea00530b7eba..e1cdd905854b1 100644
--- a/posthog/tasks/check_clickhouse_schema_drift.py
+++ b/posthog/tasks/check_clickhouse_schema_drift.py
@@ -28,9 +28,7 @@ def get_clickhouse_schema() -> List[Tuple[str, str, str]]:
             database == '{database}'
             AND table_name NOT LIKE '.inner_id.%'
-        """.format(
-            cluster=settings.CLICKHOUSE_CLUSTER, database=settings.CLICKHOUSE_DATABASE
-        )
+        """.format(cluster=settings.CLICKHOUSE_CLUSTER, database=settings.CLICKHOUSE_DATABASE)
     )
@@ -47,9 +45,7 @@ def get_clickhouse_nodes() -> List[Tuple[str]]:
         WHERE
             cluster == '{cluster}'
-        """.format(
-            cluster=settings.CLICKHOUSE_CLUSTER
-        )
+        """.format(cluster=settings.CLICKHOUSE_CLUSTER)
     )
diff --git a/posthog/tasks/exporter.py b/posthog/tasks/exporter.py
index ed41d9d5412d0..69f968f207cea 100644
--- a/posthog/tasks/exporter.py
+++ b/posthog/tasks/exporter.py
@@ -47,7 +47,7 @@ def export_asset(exported_asset_id: int, limit: Optional[int] = None) -> None:
     # if Celery is lagging then you can end up with an exported asset that has had a TTL added
     # and that TTL has passed, in the exporter we don't care about that.
     # the TTL is for later cleanup.
-    exported_asset: (ExportedAsset) = ExportedAsset.objects_including_ttl_deleted.select_related(
+    exported_asset: ExportedAsset = ExportedAsset.objects_including_ttl_deleted.select_related(
         "insight", "dashboard"
     ).get(pk=exported_asset_id)
diff --git a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py
index f28be3815d846..d939495a9abe9 100644
--- a/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_bigquery_batch_export_workflow.py
@@ -388,7 +388,7 @@ async def test_bigquery_export_workflow(
     )

     with freeze_time(TEST_TIME) as frozen_time:
-        async with await (WorkflowEnvironment.start_time_skipping()) as activity_environment:
+        async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
             async with Worker(
                 activity_environment.client,
                 task_queue=settings.TEMPORAL_TASK_QUEUE,
diff --git a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
index 3487c612808f3..ae112af3529a6 100644
--- a/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
+++ b/posthog/temporal/tests/batch_exports/test_s3_batch_export_workflow.py
@@ -601,7 +601,7 @@ async def create_s3_client(*args, **kwargs):
         """Mock function to return an already initialized S3 client."""
         yield s3_client

-    async with await (WorkflowEnvironment.start_time_skipping()) as activity_environment:
+    async with await WorkflowEnvironment.start_time_skipping() as activity_environment:
         async with Worker(
             activity_environment.client,
             task_queue=settings.TEMPORAL_TASK_QUEUE,
diff --git a/posthog/temporal/workflows/batch_exports.py b/posthog/temporal/workflows/batch_exports.py
index bf375ada75ce7..e26d8901900b9 100644
--- a/posthog/temporal/workflows/batch_exports.py
+++ b/posthog/temporal/workflows/batch_exports.py
@@ -702,6 +702,4 @@ class UpdateBatchExportBackfillStatusInputs:
 @activity.defn
 async def update_batch_export_backfill_model_status(inputs: UpdateBatchExportBackfillStatusInputs):
     """Activity that updates the status of an BatchExportRun."""
-    await sync_to_async(update_batch_export_backfill_status)(
-        backfill_id=uuid.UUID(inputs.id), status=inputs.status
-    )  # type: ignore
+    await sync_to_async(update_batch_export_backfill_status)(backfill_id=uuid.UUID(inputs.id), status=inputs.status)  # type: ignore
diff --git a/pyproject.toml b/pyproject.toml
index b3108325e5b6b..689a4c32bf2a2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,13 +3,10 @@ line-length = 120
 target-version = ['py310']

 [tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 8
-ensure_newline_before_comments = true
-line_length = 120
+profile = "black"

 [tool.ruff]
+line-length = 120
 exclude = [
     ".git",
     "./plugin-server/node_modules/",