Enable more ruff rules
webjunkie committed Apr 8, 2024
1 parent e10437e commit 2172137
Showing 92 changed files with 263 additions and 279 deletions.
10 changes: 5 additions & 5 deletions ee/billing/quota_limiting.py
@@ -327,17 +327,17 @@ def update_all_org_billing_quotas(
period_start, period_end = period

# Clickhouse is good at counting things so we count across all teams rather than doing it one by one
all_data = dict(
teams_with_event_count_in_period=convert_team_usage_rows_to_dict(
all_data = {
"teams_with_event_count_in_period": convert_team_usage_rows_to_dict(
get_teams_with_billable_event_count_in_period(period_start, period_end)
),
teams_with_recording_count_in_period=convert_team_usage_rows_to_dict(
"teams_with_recording_count_in_period": convert_team_usage_rows_to_dict(
get_teams_with_recording_count_in_period(period_start, period_end)
),
teams_with_rows_synced_in_period=convert_team_usage_rows_to_dict(
"teams_with_rows_synced_in_period": convert_team_usage_rows_to_dict(
get_teams_with_rows_synced_in_period(period_start, period_end)
),
)
}

teams: Sequence[Team] = list(
Team.objects.select_related("organization")
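The commit message doesn't list which rules were enabled, but the rewrite above matches ruff's flake8-comprehensions check C408 (unnecessary dict() call with keyword arguments) — treat that rule code as an inference. A minimal standalone sketch of the pattern, with made-up names and values:

# Illustrative only, not part of the diff; C408 is assumed to be among the enabled rules.
usage = dict(events=100, recordings=2)      # keyword-argument dict() call, flagged
usage = {"events": 100, "recordings": 2}    # equivalent literal; no function call, and keys need not be valid identifiers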
24 changes: 12 additions & 12 deletions ee/billing/test/test_quota_limiting.py
@@ -440,15 +440,15 @@ def test_set_org_usage_summary_updates_correctly(self):
}
self.organization.save()

new_usage = dict(
events={"usage": 100, "limit": 100},
recordings={"usage": 2, "limit": 100},
rows_synced={"usage": 6, "limit": 100},
period=[
new_usage = {
"events": {"usage": 100, "limit": 100},
"recordings": {"usage": 2, "limit": 100},
"rows_synced": {"usage": 6, "limit": 100},
"period": [
"2021-01-01T00:00:00Z",
"2021-01-31T23:59:59Z",
],
)
}

assert set_org_usage_summary(self.organization, new_usage=new_usage)

@@ -468,15 +468,15 @@ def test_set_org_usage_summary_does_nothing_if_the_same(self):
}
self.organization.save()

new_usage = dict(
events={"usage": 99, "limit": 100},
recordings={"usage": 1, "limit": 100},
rows_synced={"usage": 5, "limit": 100},
period=[
new_usage = {
"events": {"usage": 99, "limit": 100},
"recordings": {"usage": 1, "limit": 100},
"rows_synced": {"usage": 5, "limit": 100},
"period": [
"2021-01-01T00:00:00Z",
"2021-01-31T23:59:59Z",
],
)
}

assert not set_org_usage_summary(self.organization, new_usage=new_usage)

2 changes: 1 addition & 1 deletion ee/clickhouse/materialized_columns/analyze.py
@@ -58,7 +58,7 @@ def group_on_events_properties(self, group_type_index: int, team_id: str) -> Set

def _get_properties(self, query, team_id) -> Set[str]:
rows = sync_execute(query, {"team_id": team_id})
return set(name for name, _ in rows)
return {name for name, _ in rows}


class Query:
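Turning set(name for name, _ in rows) into a comprehension, as above, looks like ruff's C401 rule (unnecessary generator passed to set()); the rule code is an assumption, not stated in the commit. A small sketch with hypothetical rows:

# Illustrative only; names and data are hypothetical.
rows = [("plan", 1), ("browser", 2), ("plan", 3)]
names = set(name for name, _ in rows)   # generator wrapped in set(), flagged
names = {name for name, _ in rows}      # set comprehension, same result: {"plan", "browser"}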
60 changes: 27 additions & 33 deletions ee/clickhouse/models/test/test_property.py
@@ -1773,18 +1773,16 @@ def clean_up_materialised_columns():
@freeze_time("2021-04-01T01:00:00.000Z")
def test_prop_filter_json_extract(test_events, clean_up_materialised_columns, property, expected_event_indexes, team):
query, params = prop_filter_json_extract(property, 0, allow_denormalized_props=False)
uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert len(uuids) == len(expected) # helpful when diagnosing assertion failure below
assert uuids == expected
@@ -1801,18 +1799,16 @@ def test_prop_filter_json_extract_materialized(

assert "JSONExtract" not in query

uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert uuids == expected

@@ -1837,18 +1833,16 @@ def test_prop_filter_json_extract_person_on_events_materialized(
)
assert "JSON" not in query

uuids = list(
sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
uuids = sorted(
[
str(uuid)
for (uuid,) in sync_execute(
f"SELECT uuid FROM events WHERE team_id = %(team_id)s {query}",
{"team_id": team.pk, **params},
)
]
)
expected = list(sorted([test_events[index] for index in expected_event_indexes]))
expected = sorted([test_events[index] for index in expected_event_indexes])

assert uuids == expected

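The test changes above drop redundant list() wrappers around and inside sorted(); since sorted() already returns a new list and accepts any iterable, list(sorted(x)) and sorted(list(x)) both reduce to sorted(x). These rewrites appear to correspond to ruff's C413 and C414 checks (again an assumption). A minimal sketch:

# Illustrative only.
values = {"b", "a", "c"}
ordered = list(sorted(values))   # outer list() is redundant: sorted() already returns a list (C413)
ordered = sorted(list(values))   # inner list() is redundant: sorted() accepts any iterable (C414)
ordered = sorted(values)         # same result in all three cases: ["a", "b", "c"]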
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/column_optimizer.py
@@ -22,7 +22,7 @@ class EnterpriseColumnOptimizer(FOSSColumnOptimizer):
@cached_property
def group_types_to_query(self) -> Set[GroupTypeIndex]:
used_properties = self.used_properties_with_type("group")
return set(cast(GroupTypeIndex, group_type_index) for _, _, group_type_index in used_properties)
return {cast(GroupTypeIndex, group_type_index) for _, _, group_type_index in used_properties}

@cached_property
def group_on_event_columns_to_query(self) -> Set[ColumnName]:
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/experiments/utils.py
@@ -31,7 +31,7 @@ def requires_flag_warning(filter: Filter, team: Team) -> bool:
events.add(entity.id)

entity_query = f"AND event IN %(events_list)s"
entity_params = {"events_list": sorted(list(events))}
entity_params = {"events_list": sorted(events)}

events_result = sync_execute(
f"""
2 changes: 1 addition & 1 deletion ee/clickhouse/queries/funnels/funnel_correlation.py
@@ -587,7 +587,7 @@ def _get_funnel_step_names(self):
elif entity.id is not None:
events.add(entity.id)

return sorted(list(events))
return sorted(events)

def _run(self) -> Tuple[List[EventOddsRatio], bool]:
"""
4 changes: 2 additions & 2 deletions ee/migrations/0012_migrate_tags_v2.py
@@ -40,7 +40,7 @@ def forwards(apps, schema_editor):
)
event_definitions = iter(event_definition_paginator.get_page(event_definition_page))
for tags, team_id, event_definition_id in event_definitions:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
unique_tags = {tagify(t) for t in tags if isinstance(t, str) and t.strip() != ""}
for tag in unique_tags:
temp_tag = Tag(name=tag, team_id=team_id)
createables.append(
@@ -71,7 +71,7 @@ def forwards(apps, schema_editor):
)
property_definitions = iter(property_definition_paginator.get_page(property_definition_page))
for tags, team_id, property_definition_id in property_definitions:
unique_tags = set(tagify(t) for t in tags if isinstance(t, str) and t.strip() != "")
unique_tags = {tagify(t) for t in tags if isinstance(t, str) and t.strip() != ""}
for tag in unique_tags:
temp_tag = Tag(name=tag, team_id=team_id)
createables.append(
14 changes: 7 additions & 7 deletions ee/tasks/test/subscriptions/subscriptions_test_factory.py
@@ -7,13 +7,13 @@


def create_subscription(**kwargs: Any) -> Subscription:
payload = dict(
target_type="email",
target_value="[email protected],[email protected]",
frequency="daily",
interval=1,
start_date=datetime(2022, 1, 1, 9, 0).replace(tzinfo=ZoneInfo("UTC")),
)
payload = {
"target_type": "email",
"target_value": "[email protected],[email protected]",
"frequency": "daily",
"interval": 1,
"start_date": datetime(2022, 1, 1, 9, 0).replace(tzinfo=ZoneInfo("UTC")),
}

payload.update(kwargs)
return Subscription.objects.create(**payload)
2 changes: 1 addition & 1 deletion posthog/api/documentation.py
@@ -250,5 +250,5 @@ def custom_postprocessing_hook(result, generator, request, public):
**result,
"info": {"title": "PostHog API", "version": None, "description": ""},
"paths": paths,
"x-tagGroups": [{"name": "All endpoints", "tags": sorted(list(set(all_tags)))}],
"x-tagGroups": [{"name": "All endpoints", "tags": sorted(set(all_tags))}],
}
2 changes: 1 addition & 1 deletion posthog/api/ingestion_warnings.py
@@ -47,4 +47,4 @@ def _calculate_summaries(warning_events):
summaries[warning_type]["warnings"].append({"type": warning_type, "timestamp": timestamp, "details": details})
summaries[warning_type]["count"] += 1

return list(sorted(summaries.values(), key=lambda summary: summary["lastSeen"], reverse=True))
return sorted(summaries.values(), key=lambda summary: summary["lastSeen"], reverse=True)
12 changes: 6 additions & 6 deletions posthog/api/sharing.py
@@ -122,12 +122,12 @@ def _get_sharing_configuration(self, context: Dict[str, Any]):
insight = context.get("insight")
recording = context.get("recording")

config_kwargs = dict(
team_id=self.team_id,
insight=insight,
dashboard=dashboard,
recording=recording,
)
config_kwargs = {
"team_id": self.team_id,
"insight": insight,
"dashboard": dashboard,
"recording": recording,
}

try:
instance = SharingConfiguration.objects.get(**config_kwargs)
2 changes: 1 addition & 1 deletion posthog/api/test/dashboards/test_dashboard.py
@@ -903,7 +903,7 @@ def test_dashboard_duplication(self):
self.assertEqual(len(response["tiles"]), len(existing_dashboard.insights.all()))

existing_dashboard_item_id_set = {tile1.pk, tile2.pk}
response_item_id_set = set(map(lambda x: x.get("id", None), response["tiles"]))
response_item_id_set = {x.get("id", None) for x in response["tiles"]}
# check both sets are disjoint to verify that the new items' ids are different than the existing items

self.assertTrue(existing_dashboard_item_id_set.isdisjoint(response_item_id_set))
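Replacing set(map(lambda x: x.get("id", None), ...)) with a comprehension matches ruff's C417 rule (unnecessary map with a lambda), assuming that rule is part of the newly enabled set. A sketch with hypothetical tiles:

# Illustrative only; the tile data is made up.
tiles = [{"id": 1}, {"id": 2}, {}]
ids = set(map(lambda t: t.get("id", None), tiles))   # map + lambda, flagged
ids = {t.get("id", None) for t in tiles}             # comprehension, same result: {1, 2, None}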
2 changes: 1 addition & 1 deletion posthog/api/test/dashboards/test_dashboard_text_tiles.py
@@ -164,7 +164,7 @@ def test_can_update_a_single_text_tile_color(self) -> None:
dashboard_id, dashboard_json = self.dashboard_api.update_text_tile(dashboard_id, updated_tile)

assert len(dashboard_json["tiles"]) == 2
assert set((t["id"], t["color"]) for t in dashboard_json["tiles"]) == {
assert {(t["id"], t["color"]) for t in dashboard_json["tiles"]} == {
(tile_ids[0], "purple"),
(tile_ids[1], None),
}
4 changes: 2 additions & 2 deletions posthog/api/test/test_feature_flag.py
@@ -1327,7 +1327,7 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture):
self.assertEqual(instance.key, "alpha-feature")

dashboard = instance.usage_dashboard
tiles = sorted([tile for tile in dashboard.tiles.all()], key=lambda x: x.insight.name)
tiles = sorted(dashboard.tiles.all(), key=lambda x: x.insight.name)

self.assertEqual(dashboard.name, "Generated Dashboard: alpha-feature Usage")
self.assertEqual(
@@ -1421,7 +1421,7 @@ def test_create_feature_flag_usage_dashboard(self, mock_capture):
instance.refresh_from_db()

dashboard = instance.usage_dashboard
tiles = sorted([tile for tile in dashboard.tiles.all()], key=lambda x: x.insight.name)
tiles = sorted(dashboard.tiles.all(), key=lambda x: x.insight.name)

self.assertEqual(dashboard.name, "Generated Dashboard: alpha-feature Usage")
self.assertEqual(
2 changes: 1 addition & 1 deletion posthog/api/test/test_organization_feature_flag.py
@@ -606,7 +606,7 @@ def connect(parent, child):
original_cohorts_cache = {}
for _, cohort in cohorts.items():
original_cohorts_cache[cohort.id] = cohort
original_cohort_ids = {cohort_id for cohort_id in original_cohorts_cache.keys()}
original_cohort_ids = set(original_cohorts_cache.keys())
topologically_sorted_original_cohort_ids = sort_cohorts_topologically(
original_cohort_ids, original_cohorts_cache
)
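A comprehension that only copies its iterable, as above, can be replaced by the matching constructor call; this looks like ruff's C416 (unnecessary comprehension). The same pattern appears further down for dicts, where {team: score for team, score in teams_by_recency} becomes dict(teams_by_recency). A sketch with hypothetical data:

# Illustrative only; cache contents are made up.
cache = {1: "a", 2: "b"}
ids = {key for key in cache.keys()}   # identity set comprehension, flagged
ids = set(cache.keys())               # equivalent constructor call: {1, 2}
pairs = [("x", 1), ("y", 2)]
mapping = {k: v for k, v in pairs}    # identity dict comprehension
mapping = dict(pairs)                 # equivalent: {"x": 1, "y": 2}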
2 changes: 1 addition & 1 deletion posthog/api/test/test_plugin.py
@@ -804,7 +804,7 @@ def test_transpile_plugin_frontend_source(self, mock_get, mock_reload):
)
try:
PluginSourceFile.objects.get(plugin_id=id)
assert False, "Should have thrown DoesNotExist"
raise AssertionError("Should have thrown DoesNotExist")
except PluginSourceFile.DoesNotExist:
assert True

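Swapping assert False, "..." for an explicit raise AssertionError(...) matches flake8-bugbear's B011 rule (assumed to be among the enabled set): assert statements are removed entirely when Python runs with -O, so an assert False guard can silently vanish, while a raised exception cannot. A runnable sketch of the same shape as the test above, with a hypothetical helper:

# Illustrative only; fetch() stands in for the ORM lookup used in the test.
def fetch(plugin_id):
    raise KeyError(plugin_id)

try:
    fetch(42)
    raise AssertionError("Should have thrown KeyError")  # still raises under python -O, unlike assert False
except KeyError:
    pass  # expected path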
12 changes: 5 additions & 7 deletions posthog/async_migrations/migrations/0002_events_sample_by.py
@@ -186,13 +186,11 @@ def healthcheck(self):

@cached_property
def _partitions(self):
return list(
sorted(
row[0]
for row in sync_execute(
f"SELECT DISTINCT toUInt32(partition) FROM system.parts WHERE database = %(database)s AND table='{EVENTS_TABLE}'",
{"database": CLICKHOUSE_DATABASE},
)
return sorted(
row[0]
for row in sync_execute(
f"SELECT DISTINCT toUInt32(partition) FROM system.parts WHERE database = %(database)s AND table='{EVENTS_TABLE}'",
{"database": CLICKHOUSE_DATABASE},
)
)

@@ -91,4 +91,4 @@ def migrate_team_operation(self, team_id: int):

@cached_property
def _team_ids(self):
return list(sorted(row[0] for row in sync_execute("SELECT DISTINCT team_id FROM person_distinct_id")))
return sorted(row[0] for row in sync_execute("SELECT DISTINCT team_id FROM person_distinct_id"))
2 changes: 1 addition & 1 deletion posthog/async_migrations/setup.py
@@ -42,7 +42,7 @@ def setup_async_migrations(ignore_posthog_version: bool = False):
4. Populate a dependencies map and in-memory record of migration definitions
"""

applied_migrations = set(instance.name for instance in get_all_completed_async_migrations())
applied_migrations = {instance.name for instance in get_all_completed_async_migrations()}
unapplied_migrations = set(ALL_ASYNC_MIGRATIONS.keys()) - applied_migrations

for migration_name, migration in ALL_ASYNC_MIGRATIONS.items():
2 changes: 1 addition & 1 deletion posthog/batch_exports/service.py
@@ -472,7 +472,7 @@ def sync_batch_export(batch_export: BatchExport, created: bool):
paused=batch_export.paused,
)

destination_config_fields = set(field.name for field in fields(workflow_inputs))
destination_config_fields = {field.name for field in fields(workflow_inputs)}
destination_config = {k: v for k, v in batch_export.destination.config.items() if k in destination_config_fields}

temporal = sync_connect()
2 changes: 1 addition & 1 deletion posthog/caching/test/test_insight_cache.py
@@ -54,7 +54,7 @@ def create_insight_caching_state(

# Reaching into the internals of LocMemCache
def cache_keys(cache):
return set(key.split(":", 2)[-1] for key in cache._cache.keys())
return {key.split(":", 2)[-1] for key in cache._cache.keys()}


@pytest.mark.django_db
4 changes: 2 additions & 2 deletions posthog/caching/utils.py
@@ -58,12 +58,12 @@ def active_teams() -> Set[int]:
return set()
redis.zadd(
RECENTLY_ACCESSED_TEAMS_REDIS_KEY,
{team: score for team, score in teams_by_recency},
dict(teams_by_recency),
)
redis.expire(RECENTLY_ACCESSED_TEAMS_REDIS_KEY, IN_A_DAY)
all_teams = teams_by_recency

return set(int(team_id) for team_id, _ in all_teams)
return {int(team_id) for team_id, _ in all_teams}


def stale_cache_invalidation_disabled(team: Team) -> bool:
2 changes: 1 addition & 1 deletion posthog/clickhouse/system_status.py
@@ -122,7 +122,7 @@ def system_status() -> Generator[SystemStatusRow, None, None]:
"value": "",
"subrows": {
"columns": ["Metric", "Value", "Description"],
"rows": list(sorted(system_metrics)),
"rows": sorted(system_metrics),
},
}

2 changes: 1 addition & 1 deletion posthog/demo/matrix/taxonomy_inference.py
@@ -58,7 +58,7 @@ def infer_taxonomy_for_team(team_id: int) -> Tuple[int, int, int]:
def _get_events_last_seen_at(team_id: int) -> Dict[str, timezone.datetime]:
from posthog.client import sync_execute

return {event: last_seen_at for event, last_seen_at in sync_execute(_GET_EVENTS_LAST_SEEN_AT, {"team_id": team_id})}
return dict(sync_execute(_GET_EVENTS_LAST_SEEN_AT, {"team_id": team_id}))


def _get_property_types(team_id: int) -> Dict[str, Optional[PropertyType]]: