Skip to content

Commit

Permalink
Enable more ruff rules
Browse files — browse the repository at this point in the history
  • Loading branch information
webjunkie committed Apr 3, 2024
1 parent 2fe9d37 commit a1b6b79
Show file tree
Hide file tree
Showing 126 changed files with 676 additions and 346 deletions.
10 changes: 5 additions & 5 deletions ee/billing/quota_limiting.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,17 +327,17 @@ def update_all_org_billing_quotas(
period_start, period_end = period

# Clickhouse is good at counting things so we count across all teams rather than doing it one by one
all_data = dict(
teams_with_event_count_in_period=convert_team_usage_rows_to_dict(
all_data = {
"teams_with_event_count_in_period": convert_team_usage_rows_to_dict(
get_teams_with_billable_event_count_in_period(period_start, period_end)
),
teams_with_recording_count_in_period=convert_team_usage_rows_to_dict(
"teams_with_recording_count_in_period": convert_team_usage_rows_to_dict(
get_teams_with_recording_count_in_period(period_start, period_end)
),
teams_with_rows_synced_in_period=convert_team_usage_rows_to_dict(
"teams_with_rows_synced_in_period": convert_team_usage_rows_to_dict(
get_teams_with_rows_synced_in_period(period_start, period_end)
),
)
}

teams: Sequence[Team] = list(
Team.objects.select_related("organization")
Expand Down
24 changes: 12 additions & 12 deletions ee/billing/test/test_quota_limiting.py
Original file line number Diff line number Diff line change
Expand Up @@ -440,15 +440,15 @@ def test_set_org_usage_summary_updates_correctly(self):
}
self.organization.save()

new_usage = dict(
events={"usage": 100, "limit": 100},
recordings={"usage": 2, "limit": 100},
rows_synced={"usage": 6, "limit": 100},
period=[
new_usage = {
"events": {"usage": 100, "limit": 100},
"recordings": {"usage": 2, "limit": 100},
"rows_synced": {"usage": 6, "limit": 100},
"period": [
"2021-01-01T00:00:00Z",
"2021-01-31T23:59:59Z",
],
)
}

assert set_org_usage_summary(self.organization, new_usage=new_usage)

Expand All @@ -468,15 +468,15 @@ def test_set_org_usage_summary_does_nothing_if_the_same(self):
}
self.organization.save()

new_usage = dict(
events={"usage": 99, "limit": 100},
recordings={"usage": 1, "limit": 100},
rows_synced={"usage": 5, "limit": 100},
period=[
new_usage = {
"events": {"usage": 99, "limit": 100},
"recordings": {"usage": 1, "limit": 100},
"rows_synced": {"usage": 5, "limit": 100},
"period": [
"2021-01-01T00:00:00Z",
"2021-01-31T23:59:59Z",
],
)
}

assert not set_org_usage_summary(self.organization, new_usage=new_usage)

Expand Down
2 changes: 1 addition & 1 deletion ee/clickhouse/materialized_columns/analyze.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ def group_on_events_properties(self, group_type_index: int, team_id: str) -> Set

def _get_properties(self, query, team_id) -> Set[str]:
rows = sync_execute(query, {"team_id": team_id})
return set(name for name, _ in rows)
return {name for name, _ in rows}


class Query:
Expand Down
60 changes: 27 additions & 33 deletions ee/clickhouse/models/test/test_property.py
Original file line number Diff line number Diff line change
Expand Up @@ -1773,18 +1773,16 @@ def clean_up_materialised_columns():
@freeze_time("2021-04-01T01:00:00.000Z")
def test_prop_filter_json_extract(test_events, clean_up_materialised_columns, property, expected_event_indexes, team):
query, params = prop_filter_json_extract(property, 0, allow_denormalized_props=False)
uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert len(uuids) == len(expected) # helpful when diagnosing assertion failure below
assert uuids == expected
Expand All @@ -1801,18 +1799,16 @@ def test_prop_filter_json_extract_materialized(

assert "JSONExtract" not in query

uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert uuids == expected

Expand All @@ -1837,18 +1833,16 @@ def test_prop_filter_json_extract_person_on_events_materialized(
)
assert "JSON" not in query

uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert uuids == expected

Expand Down
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/column_optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ class EnterpriseColumnOptimizer(FOSSColumnOptimizer):
@cached_property
def group_types_to_query(self) -> Set[GroupTypeIndex]:
used_properties = self.used_properties_with_type("group")
return set(cast(GroupTypeIndex, group_type_index) for _, _, group_type_index in used_properties)
return {cast(GroupTypeIndex, group_type_index) for _, _, group_type_index in used_properties}

@cached_property
def group_on_event_columns_to_query(self) -> Set[ColumnName]:
Expand Down
12 changes: 9 additions & 3 deletions ee/clickhouse/queries/event_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,19 @@ def __init__(
should_join_distinct_ids=False,
should_join_persons=False,
# Extra events/person table columns to fetch since parent query needs them
extra_fields: List[ColumnName] = [],
extra_event_properties: List[PropertyName] = [],
extra_person_fields: List[ColumnName] = [],
extra_fields: List[ColumnName] = None,
extra_event_properties: List[PropertyName] = None,
extra_person_fields: List[ColumnName] = None,
override_aggregate_users_by_distinct_id: Optional[bool] = None,
person_on_events_mode: PersonOnEventsMode = PersonOnEventsMode.DISABLED,
**kwargs,
) -> None:
if extra_person_fields is None:
extra_person_fields = []
if extra_event_properties is None:
extra_event_properties = []
if extra_fields is None:
extra_fields = []
super().__init__(
filter=filter,
team=team,
Expand Down
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/experiments/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def requires_flag_warning(filter: Filter, team: Team) -> bool:
events.add(entity.id)

entity_query = f"AND event IN %(events_list)s"
entity_params = {"events_list": sorted(list(events))}
entity_params = {"events_list": sorted(events)}

events_result = sync_execute(
f"""
Expand Down
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/funnels/funnel_correlation.py
Original file line number Diff line number Diff line change
Expand Up @@ -587,7 +587,7 @@ def _get_funnel_step_names(self):
elif entity.id is not None:
events.add(entity.id)

return sorted(list(events))
return sorted(events)

def _run(self) -> Tuple[List[EventOddsRatio], bool]:
"""
Expand Down
4 changes: 3 additions & 1 deletion ee/clickhouse/queries/test/test_cohort_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,10 @@ def _make_event_sequence(
interval_days,
period_event_counts,
event="$pageview",
properties={},
properties=None,
):
if properties is None:
properties = {}
for period_index, event_count in enumerate(period_event_counts):
for i in range(event_count):
_create_event(
Expand Down
4 changes: 2 additions & 2 deletions ee/migrations/0012_migrate_tags_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def forwards(apps, schema_editor):
)
event_definitions = iter(event_definition_paginator.get_page(event_definition_page))
for tags, team_id, event_definition_id in event_definitions:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
unique_tags = {tagify(t) for t in tags if isinstance(t, str) and t.strip() != ""}
for tag in unique_tags:
temp_tag = Tag(name=tag, team_id=team_id)
createables.append(
Expand Down Expand Up @@ -71,7 +71,7 @@ def forwards(apps, schema_editor):
)
property_definitions = iter(property_definition_paginator.get_page(property_definition_page))
for tags, team_id, property_definition_id in property_definitions:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
unique_tags = {tagify(t) for t in tags if isinstance(t, str) and t.strip() != ""}
for tag in unique_tags:
temp_tag = Tag(name=tag, team_id=team_id)
createables.append(
Expand Down
14 changes: 7 additions & 7 deletions ee/tasks/test/subscriptions/subscriptions_test_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,13 @@


def create_subscription(**kwargs: Any) -> Subscription:
payload = dict(
target_type="email",
target_value="[email protected],[email protected]",
frequency="daily",
interval=1,
start_date=datetime(2022, 1, 1, 9, 0).replace(tzinfo=ZoneInfo("UTC")),
)
payload = {
"target_type": "email",
"target_value": "[email protected],[email protected]",
"frequency": "daily",
"interval": 1,
"start_date": datetime(2022, 1, 1, 9, 0).replace(tzinfo=ZoneInfo("UTC")),
}

payload.update(kwargs)
return Subscription.objects.create(**payload)
Loading

0 comments on commit a1b6b79

Please sign in to comment.