chore: Use ruff formatter #18207

Merged: 12 commits, Oct 31, 2023
4 changes: 2 additions & 2 deletions .github/workflows/ci-backend.yml
@@ -125,11 +125,11 @@ jobs:

- name: Check for syntax errors, import sort, and code style violations
run: |
- ruff .
+ ruff check .

- name: Check formatting
run: |
- black --exclude posthog/hogql/grammar --check --diff .
+ ruff format --exclude posthog/hogql/grammar --check --diff .

- name: Check static typing
run: |
2 changes: 1 addition & 1 deletion ee/api/explicit_team_member.py
@@ -48,7 +48,7 @@ def create(self, validated_data):
user_uuid = validated_data.pop("user_uuid")
validated_data["team"] = team
try:
- requesting_parent_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+ requesting_parent_membership: OrganizationMembership = OrganizationMembership.objects.get(
organization_id=team.organization_id,
user__uuid=user_uuid,
user__is_active=True,
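This one-line change is the most common pattern in the PR: ruff's formatter drops the redundant parentheses around the type annotation on a long assignment and wraps the call arguments instead. The same swap appears again below in ee/api/role.py, ee/models/role.py, posthog/api/feature_flag.py, posthog/api/organization_member.py, posthog/api/team.py, posthog/hogql/database/database.py, posthog/models/async_migration.py, and posthog/models/dashboard.py. A minimal, self-contained sketch of the before/after shapes (the Membership class and the values are hypothetical, not taken from the PostHog codebase):

```python
class Membership:
    """Stand-in for a Django model class such as OrganizationMembership."""

    @classmethod
    def get(cls, *, organization_id: int, user_id: int) -> "Membership":
        return cls()


# Before: the annotation itself was parenthesized to help the old formatting
# keep the statement within the line limit.
requesting_membership: (Membership) = Membership.get(organization_id=1, user_id=2)

# After ruff format: the annotation stays bare; only the call arguments get
# wrapped onto separate lines when the statement is too long.
requesting_membership: Membership = Membership.get(organization_id=1, user_id=2)
```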
2 changes: 1 addition & 1 deletion ee/api/role.py
@@ -25,7 +25,7 @@ class RolePermissions(BasePermission):
def has_permission(self, request, view):
organization = request.user.organization

- requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+ requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
user_id=cast(User, request.user).id,
organization=organization,
)
30 changes: 18 additions & 12 deletions ee/clickhouse/models/test/test_filters.py
@@ -1219,13 +1219,16 @@ def test_person_cohort_properties(self):
)
query = """
SELECT distinct_id FROM person_distinct_id2 WHERE team_id = %(team_id)s {prop_clause}
""".format(
prop_clause=prop_clause
)
""".format(prop_clause=prop_clause)
# get distinct_id column of result
- result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[
- 0
- ][0]
+ result = sync_execute(
+ query,
+ {
+ "team_id": self.team.pk,
+ **prop_clause_params,
+ **filter.hogql_context.values,
+ },
+ )[0][0]
self.assertEqual(result, person1_distinct_id)

# test cohort2 with negation
@@ -1241,13 +1244,16 @@ def test_person_cohort_properties(self):
)
query = """
SELECT distinct_id FROM person_distinct_id2 WHERE team_id = %(team_id)s {prop_clause}
""".format(
prop_clause=prop_clause
)
""".format(prop_clause=prop_clause)
# get distinct_id column of result
- result = sync_execute(query, {"team_id": self.team.pk, **prop_clause_params, **filter.hogql_context.values,},)[
- 0
- ][0]
+ result = sync_execute(
+ query,
+ {
+ "team_id": self.team.pk,
+ **prop_clause_params,
+ **filter.hogql_context.values,
+ },
+ )[0][0]

self.assertEqual(result, person2_distinct_id)

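The expanded sync_execute(...) calls above (and the matching ones later in posthog/api/test/test_event.py, posthog/api/test/test_person.py, and posthog/demo/test/test_matrix_manager.py) follow the black-compatible "magic trailing comma" rule: because the params dict and the argument list already end with a trailing comma, the formatter lays them out one element per line instead of packing the whole call onto a single over-long line. A small self-contained sketch, with a hypothetical stub standing in for posthog.client.sync_execute:

```python
from typing import Any, Dict, List


def sync_execute(query: str, params: Dict[str, Any]) -> List[List[int]]:
    """Hypothetical stub standing in for posthog.client.sync_execute."""
    return [[0]]


# The trailing commas inside the dict and the argument list are "magic":
# ruff format (like black) keeps one element per line here rather than
# collapsing the call back onto a single line.
result = sync_execute(
    "select count(*) from events where team_id = %(team_id)s",
    {
        "team_id": 1,
    },
)[0][0]
```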
6 changes: 5 additions & 1 deletion ee/clickhouse/queries/funnels/funnel_correlation_persons.py
@@ -52,7 +52,11 @@ def actor_query(self, limit_actors: Optional[bool] = True):

def get_actors(
self,
- ) -> Tuple[Union[QuerySet[Person], QuerySet[Group]], Union[List[SerializedGroup], List[SerializedPerson]], int,]:
+ ) -> Tuple[
+ Union[QuerySet[Person], QuerySet[Group]],
+ Union[List[SerializedGroup], List[SerializedPerson]],
+ int,
+ ]:
if self._filter.correlation_type == FunnelCorrelationType.PROPERTIES:
return _FunnelPropertyCorrelationActors(self._filter, self._team, self._base_uri).get_actors()
else:
2 changes: 1 addition & 1 deletion ee/models/role.py
@@ -12,7 +12,7 @@ class Role(UUIDModel):
related_name="roles",
related_query_name="role",
)
- feature_flags_access_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+ feature_flags_access_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
default=OrganizationResourceAccess.AccessLevel.CAN_ALWAYS_EDIT,
choices=OrganizationResourceAccess.AccessLevel.choices,
)
4 changes: 1 addition & 3 deletions ee/session_recordings/session_recording_playlist.py
@@ -265,9 +265,7 @@ def modify_recordings(
return response.Response({"success": True})

if request.method == "DELETE":
- playlist_item = SessionRecordingPlaylistItem.objects.get(
- playlist=playlist, recording=session_recording_id
- ) # type: ignore
+ playlist_item = SessionRecordingPlaylistItem.objects.get(playlist=playlist, recording=session_recording_id) # type: ignore

if playlist_item:
playlist_item.delete()
4 changes: 1 addition & 3 deletions ee/tasks/test/test_calculate_cohort.py
@@ -12,9 +12,7 @@
from posthog.test.base import ClickhouseTestMixin, _create_event, _create_person


- class TestClickhouseCalculateCohort(
- ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person)
- ): # type: ignore
+ class TestClickhouseCalculateCohort(ClickhouseTestMixin, calculate_cohort_test_factory(_create_event, _create_person)): # type: ignore
@patch("posthog.tasks.calculate_cohort.insert_cohort_from_insight_filter.delay")
def test_create_stickiness_cohort(self, _insert_cohort_from_insight_filter):
_create_person(team_id=self.team.pk, distinct_ids=["blabla"])
8 changes: 4 additions & 4 deletions package.json
@@ -38,7 +38,7 @@
"build:esbuild": "node frontend/build.mjs",
"schema:build": "pnpm run schema:build:json && pnpm run schema:build:python",
"schema:build:json": "ts-json-schema-generator -f tsconfig.json --path 'frontend/src/queries/schema.ts' --no-type-check > frontend/src/queries/schema.json && prettier --write frontend/src/queries/schema.json",
"schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && black posthog/schema.py",
"schema:build:python": "datamodel-codegen --class-name='SchemaRoot' --collapse-root-models --disable-timestamp --use-one-literal-as-default --use-default-kwarg --use-subclass-enum --input frontend/src/queries/schema.json --input-file-type jsonschema --output posthog/schema.py --output-model-type pydantic_v2.BaseModel && ruff format posthog/schema.py",
"grammar:build": "npm run grammar:build:python && npm run grammar:build:cpp",
"grammar:build:python": "cd posthog/hogql/grammar && antlr -Dlanguage=Python3 HogQLLexer.g4 && antlr -visitor -no-listener -Dlanguage=Python3 HogQLParser.g4",
"grammar:build:cpp": "cd posthog/hogql/grammar && antlr -o ../../../hogql_parser -Dlanguage=Cpp HogQLLexer.g4 && antlr -o ../../../hogql_parser -visitor -no-listener -Dlanguage=Cpp HogQLParser.g4",
@@ -54,7 +54,7 @@
"typegen:check": "kea-typegen check",
"typegen:watch": "kea-typegen watch --delete --show-ts-errors",
"typegen:clean": "find frontend/src -type f -name '*Type.ts' -delete",
"format:python": "black . && isort .",
"format:python": "ruff --exclude posthog/hogql/grammar .",
"format:js": "pnpm prettier && pnpm eslint --fix",
"format": "pnpm format:python && pnpm format:js",
"storybook": "storybook dev -p 6006",
@@ -305,8 +305,8 @@
"pnpm --dir plugin-server exec prettier --write"
],
"!(posthog/hogql/grammar/*)*.{py,pyi}": [
"black",
"ruff"
"ruff format",
"ruff check"
],
"!(HogQL*)*.{c,cpp,h,hpp}": "clang-format -i"
},
4 changes: 2 additions & 2 deletions posthog/api/feature_flag.py
@@ -85,10 +85,10 @@ class FeatureFlagSerializer(TaggedItemSerializerMixin, serializers.HyperlinkedMo
is_simple_flag = serializers.SerializerMethodField()
rollout_percentage = serializers.SerializerMethodField()

- experiment_set: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
+ experiment_set: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
surveys: serializers.SerializerMethodField = serializers.SerializerMethodField()
features: serializers.SerializerMethodField = serializers.SerializerMethodField()
- usage_dashboard: (serializers.PrimaryKeyRelatedField) = serializers.PrimaryKeyRelatedField(read_only=True)
+ usage_dashboard: serializers.PrimaryKeyRelatedField = serializers.PrimaryKeyRelatedField(read_only=True)
analytics_dashboards = serializers.PrimaryKeyRelatedField(
many=True,
required=False,
3 changes: 1 addition & 2 deletions posthog/api/notebook.py
@@ -273,8 +273,7 @@ def _filter_request(self, request: request.Request, queryset: QuerySet) -> Query
queryset = queryset.filter(
# some notebooks have no text_content until next saved, so we need to check the title too
# TODO this can be removed once all/most notebooks have text_content
- Q(title__search=request.GET["search"])
- | Q(text_content__search=request.GET["search"])
+ Q(title__search=request.GET["search"]) | Q(text_content__search=request.GET["search"])
)
elif key == "contains":
contains = request.GET["contains"]
4 changes: 2 additions & 2 deletions posthog/api/organization_member.py
@@ -26,7 +26,7 @@ def has_object_permission(self, request: Request, view, membership: Organization
if request.method in SAFE_METHODS:
return True
organization = extract_organization(membership)
- requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+ requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
user_id=cast(User, request.user).id,
organization=organization,
)
@@ -66,7 +66,7 @@ def get_has_social_auth(self, instance: OrganizationMembership) -> bool:
def update(self, updated_membership, validated_data, **kwargs):
updated_membership = cast(OrganizationMembership, updated_membership)
raise_errors_on_nested_writes("update", self, validated_data)
- requesting_membership: (OrganizationMembership) = OrganizationMembership.objects.get(
+ requesting_membership: OrganizationMembership = OrganizationMembership.objects.get(
organization=updated_membership.organization,
user=self.context["request"].user,
)
4 changes: 3 additions & 1 deletion posthog/api/signup.py
@@ -502,7 +502,9 @@ def social_create_user(
user=user.id if user else None,
)
if user:
- backend_processor = "domain_whitelist" # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
+ backend_processor = (
+ "domain_whitelist"
+ ) # This is actually `jit_provisioning` (name kept for backwards-compatibility purposes)
from_invite = True # jit_provisioning means they're definitely not organization_first_user

if not user:
2 changes: 1 addition & 1 deletion posthog/api/team.py
@@ -204,7 +204,7 @@ def validate(self, attrs: Any) -> Any:
organization_id = self.instance.organization_id
else:
organization_id = self.context["view"].organization
- org_membership: (OrganizationMembership) = OrganizationMembership.objects.only("level").get(
+ org_membership: OrganizationMembership = OrganizationMembership.objects.only("level").get(
organization_id=organization_id, user=request.user
)
if org_membership.level < OrganizationMembership.Level.ADMIN:
14 changes: 8 additions & 6 deletions posthog/api/test/test_event.py
@@ -446,9 +446,10 @@ def test_pagination(self):
from posthog.client import sync_execute

self.assertEqual(
sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[
0
][0],
sync_execute(
"select count(*) from events where team_id = %(team_id)s",
{"team_id": self.team.pk},
)[0][0],
250,
)

@@ -495,9 +496,10 @@ def test_pagination_bounded_date_range(self):
from posthog.client import sync_execute

self.assertEqual(
sync_execute("select count(*) from events where team_id = %(team_id)s", {"team_id": self.team.pk},)[
0
][0],
sync_execute(
"select count(*) from events where team_id = %(team_id)s",
{"team_id": self.team.pk},
)[0][0],
25,
)

7 changes: 4 additions & 3 deletions posthog/api/test/test_person.py
@@ -339,9 +339,10 @@ def test_delete_person(self):
self.assertEqual([(100, 1, "{}")], ch_persons)
# No async deletion is scheduled
self.assertEqual(AsyncDeletion.objects.filter(team_id=self.team.id).count(), 0)
- ch_events = sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
- 0
- ][0]
+ ch_events = sync_execute(
+ "SELECT count() FROM events WHERE team_id = %(team_id)s",
+ {"team_id": self.team.pk},
+ )[0][0]
self.assertEqual(ch_events, 3)

@freeze_time("2021-08-25T22:09:14.252Z")
2 changes: 1 addition & 1 deletion posthog/clickhouse/client/execute.py
@@ -84,7 +84,7 @@ def sync_execute(
from posthog.test.base import flush_persons_and_events

flush_persons_and_events()
- except (ModuleNotFoundError): # when we run plugin server tests it tries to run above, ignore
+ except ModuleNotFoundError: # when we run plugin server tests it tries to run above, ignore
pass

with get_pool(workload, team_id, readonly).get_client() as client:
@@ -8,12 +8,12 @@
)
from posthog.settings.data_stores import CLICKHOUSE_CLUSTER

- SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = lambda: """
+ SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = (
+ lambda: """
ALTER TABLE session_recording_events
ON CLUSTER '{cluster}'
COMMENT COLUMN has_full_snapshot 'column_materializer::has_full_snapshot'
""".format(
cluster=CLICKHOUSE_CLUSTER
""".format(cluster=CLICKHOUSE_CLUSTER)
)

operations = [
@@ -3,12 +3,12 @@
from posthog.client import sync_execute
from posthog.settings import CLICKHOUSE_CLUSTER

- SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = lambda: """
+ SESSION_RECORDING_EVENTS_MATERIALIZED_COLUMN_COMMENTS_SQL = (
+ lambda: """
ALTER TABLE session_recording_events
ON CLUSTER '{cluster}'
COMMENT COLUMN has_full_snapshot 'column_materializer::has_full_snapshot'
""".format(
cluster=CLICKHOUSE_CLUSTER
""".format(cluster=CLICKHOUSE_CLUSTER)
)


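The two nearly identical migration hunks above (and CREATE_COHORTPEOPLE_TABLE_SQL in posthog/models/cohort/sql.py further down) show two formatter behaviors at once: a lambda assigned to a long name is wrapped in parentheses so its body can move onto an indented line of its own, and a short .format(...) call on a triple-quoted string is kept on one line with the closing quotes instead of being spread over three lines. A hypothetical, self-contained sketch of the resulting shape (the constant names and SQL text are illustrative only, not from the PostHog migrations):

```python
CLICKHOUSE_CLUSTER = "example_cluster"  # illustrative value; the real one comes from settings

# Shape produced by the new formatting: the right-hand side is parenthesized,
# the lambda starts on its own line, and .format(...) hugs the closing quotes.
COMMENT_COLUMN_SQL = (
    lambda: """
ALTER TABLE example_events
ON CLUSTER '{cluster}'
COMMENT COLUMN has_full_snapshot 'column_materializer::has_full_snapshot'
""".format(cluster=CLICKHOUSE_CLUSTER)
)

print(COMMENT_COLUMN_SQL())
```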
14 changes: 8 additions & 6 deletions posthog/demo/test/test_matrix_manager.py
@@ -85,9 +85,10 @@ def test_run_on_team(self):

# At least one event for each cluster
assert (
sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
0
][0]
sync_execute(
"SELECT count() FROM events WHERE team_id = %(team_id)s",
{"team_id": self.team.pk},
)[0][0]
>= 3
)
assert self.team.name == DummyMatrix.PRODUCT_NAME
@@ -100,8 +101,9 @@ def test_run_on_team_using_pre_save(self):
# At least one event for each cluster
assert sync_execute("SELECT count() FROM events WHERE team_id = 0")[0][0] >= 3
assert (
sync_execute("SELECT count() FROM events WHERE team_id = %(team_id)s", {"team_id": self.team.pk},)[
0
][0]
sync_execute(
"SELECT count() FROM events WHERE team_id = %(team_id)s",
{"team_id": self.team.pk},
)[0][0]
>= 3
)
4 changes: 2 additions & 2 deletions posthog/hogql/database/database.py
@@ -61,10 +61,10 @@ class Database(BaseModel):
cohort_people: CohortPeople = CohortPeople()
static_cohort_people: StaticCohortPeople = StaticCohortPeople()
log_entries: LogEntriesTable = LogEntriesTable()
- console_logs_log_entries: (ReplayConsoleLogsLogEntriesTable) = ReplayConsoleLogsLogEntriesTable()
+ console_logs_log_entries: ReplayConsoleLogsLogEntriesTable = ReplayConsoleLogsLogEntriesTable()
batch_export_log_entries: BatchExportLogEntriesTable = BatchExportLogEntriesTable()

- raw_session_replay_events: (RawSessionReplayEventsTable) = RawSessionReplayEventsTable()
+ raw_session_replay_events: RawSessionReplayEventsTable = RawSessionReplayEventsTable()
raw_person_distinct_ids: RawPersonDistinctIdsTable = RawPersonDistinctIdsTable()
raw_persons: RawPersonsTable = RawPersonsTable()
raw_groups: RawGroupsTable = RawGroupsTable()
4 changes: 1 addition & 3 deletions posthog/hogql_queries/sessions_timeline_query_runner.py
@@ -160,9 +160,7 @@ def calculate(self) -> SessionsTimelineQueryResponse:
formal_session_id,
informal_session_id,
recording_duration_s,
- ) in reversed(
- query_result.results[: self.EVENT_LIMIT]
- ): # The last result is a marker of more results
+ ) in reversed(query_result.results[: self.EVENT_LIMIT]): # The last result is a marker of more results
entry_id = str(formal_session_id or informal_session_id)
if entry_id not in timeline_entries_map:
timeline_entries_map[entry_id] = TimelineEntry(
2 changes: 1 addition & 1 deletion posthog/models/async_migration.py
@@ -33,7 +33,7 @@ class Meta:
null=False, blank=False, default=MigrationStatus.NotStarted
)

- current_operation_index: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+ current_operation_index: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
null=False, blank=False, default=0
)
current_query_id: models.CharField = models.CharField(max_length=100, null=False, blank=False, default="")
10 changes: 6 additions & 4 deletions posthog/models/cohort/sql.py
@@ -7,7 +7,8 @@
"""

COHORTPEOPLE_TABLE_ENGINE = lambda: CollapsingMergeTree("cohortpeople", ver="sign")
- CREATE_COHORTPEOPLE_TABLE_SQL = lambda: """
+ CREATE_COHORTPEOPLE_TABLE_SQL = (
+ lambda: """
CREATE TABLE IF NOT EXISTS cohortpeople ON CLUSTER '{cluster}'
(
person_id UUID,
@@ -19,9 +20,10 @@
Order By (team_id, cohort_id, person_id, version)
{storage_policy}
""".format(
- cluster=CLICKHOUSE_CLUSTER,
- engine=COHORTPEOPLE_TABLE_ENGINE(),
- storage_policy="",
+ cluster=CLICKHOUSE_CLUSTER,
+ engine=COHORTPEOPLE_TABLE_ENGINE(),
+ storage_policy="",
+ )
)

TRUNCATE_COHORTPEOPLE_TABLE_SQL = f"TRUNCATE TABLE IF EXISTS cohortpeople ON CLUSTER '{CLICKHOUSE_CLUSTER}'"
2 changes: 1 addition & 1 deletion posthog/models/dashboard.py
@@ -52,7 +52,7 @@ class PrivilegeLevel(models.IntegerChoices):
last_accessed_at: models.DateTimeField = models.DateTimeField(blank=True, null=True)
filters: models.JSONField = models.JSONField(default=dict)
creation_mode: models.CharField = models.CharField(max_length=16, default="default", choices=CreationMode.choices)
- restriction_level: (models.PositiveSmallIntegerField) = models.PositiveSmallIntegerField(
+ restriction_level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField(
default=RestrictionLevel.EVERYONE_IN_PROJECT_CAN_EDIT,
choices=RestrictionLevel.choices,
)